From d8aa6998a1ed4e68762d53f3843da93f15fa6595 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Wed, 20 Dec 2023 10:05:43 -0500 Subject: [PATCH 01/17] remove indexer, ingester, daemon, moderation services from appview --- .../com/atproto/admin/emitModerationEvent.ts | 220 ------ .../com/atproto/admin/getModerationEvent.ts | 19 - .../src/api/com/atproto/admin/getRecord.ts | 39 -- .../bsky/src/api/com/atproto/admin/getRepo.ts | 32 - .../atproto/admin/queryModerationEvents.ts | 38 - .../atproto/admin/queryModerationStatuses.ts | 55 -- .../src/api/com/atproto/admin/searchRepos.ts | 27 - .../bsky/src/api/com/atproto/admin/util.ts | 52 -- .../com/atproto/moderation/createReport.ts | 42 -- .../src/api/com/atproto/moderation/util.ts | 89 --- packages/bsky/src/auto-moderator/abyss.ts | 114 --- .../bsky/src/auto-moderator/fuzzy-matcher.ts | 126 ---- packages/bsky/src/auto-moderator/hive.ts | 187 ----- packages/bsky/src/auto-moderator/index.ts | 302 -------- packages/bsky/src/auto-moderator/keyword.ts | 25 - packages/bsky/src/auto-moderator/util.ts | 138 ---- packages/bsky/src/daemon/config.ts | 50 -- packages/bsky/src/daemon/context.ts | 27 - packages/bsky/src/daemon/index.ts | 79 --- packages/bsky/src/daemon/logger.ts | 6 - packages/bsky/src/daemon/notifications.ts | 50 -- packages/bsky/src/daemon/services.ts | 20 - .../db/periodic-moderation-event-reversal.ts | 125 ---- packages/bsky/src/indexer/config.ts | 263 ------- packages/bsky/src/indexer/context.ts | 57 -- packages/bsky/src/indexer/index.ts | 148 ---- packages/bsky/src/indexer/logger.ts | 6 - packages/bsky/src/indexer/server.ts | 46 -- packages/bsky/src/indexer/services.ts | 32 - packages/bsky/src/indexer/subscription.ts | 345 --------- packages/bsky/src/ingester/config.ts | 141 ---- packages/bsky/src/ingester/context.ts | 27 - packages/bsky/src/ingester/index.ts | 79 --- packages/bsky/src/ingester/logger.ts | 6 - packages/bsky/src/ingester/subscription.ts | 288 -------- packages/bsky/src/migrate-moderation-data.ts | 
414 ----------- .../bsky/src/services/moderation/index.ts | 656 ------------------ .../src/services/moderation/pagination.ts | 96 --- .../bsky/src/services/moderation/status.ts | 244 ------- .../bsky/src/services/moderation/types.ts | 49 -- .../bsky/src/services/moderation/views.ts | 549 --------------- services/bsky/daemon.js | 44 -- services/bsky/indexer.js | 110 --- services/bsky/ingester.js | 75 -- 44 files changed, 5537 deletions(-) delete mode 100644 packages/bsky/src/api/com/atproto/admin/emitModerationEvent.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/getModerationEvent.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/getRecord.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/getRepo.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/queryModerationEvents.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/queryModerationStatuses.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/searchRepos.ts delete mode 100644 packages/bsky/src/api/com/atproto/admin/util.ts delete mode 100644 packages/bsky/src/api/com/atproto/moderation/createReport.ts delete mode 100644 packages/bsky/src/api/com/atproto/moderation/util.ts delete mode 100644 packages/bsky/src/auto-moderator/abyss.ts delete mode 100644 packages/bsky/src/auto-moderator/fuzzy-matcher.ts delete mode 100644 packages/bsky/src/auto-moderator/hive.ts delete mode 100644 packages/bsky/src/auto-moderator/index.ts delete mode 100644 packages/bsky/src/auto-moderator/keyword.ts delete mode 100644 packages/bsky/src/auto-moderator/util.ts delete mode 100644 packages/bsky/src/daemon/config.ts delete mode 100644 packages/bsky/src/daemon/context.ts delete mode 100644 packages/bsky/src/daemon/index.ts delete mode 100644 packages/bsky/src/daemon/logger.ts delete mode 100644 packages/bsky/src/daemon/notifications.ts delete mode 100644 packages/bsky/src/daemon/services.ts delete mode 100644 packages/bsky/src/db/periodic-moderation-event-reversal.ts 
delete mode 100644 packages/bsky/src/indexer/config.ts delete mode 100644 packages/bsky/src/indexer/context.ts delete mode 100644 packages/bsky/src/indexer/index.ts delete mode 100644 packages/bsky/src/indexer/logger.ts delete mode 100644 packages/bsky/src/indexer/server.ts delete mode 100644 packages/bsky/src/indexer/services.ts delete mode 100644 packages/bsky/src/indexer/subscription.ts delete mode 100644 packages/bsky/src/ingester/config.ts delete mode 100644 packages/bsky/src/ingester/context.ts delete mode 100644 packages/bsky/src/ingester/index.ts delete mode 100644 packages/bsky/src/ingester/logger.ts delete mode 100644 packages/bsky/src/ingester/subscription.ts delete mode 100644 packages/bsky/src/migrate-moderation-data.ts delete mode 100644 packages/bsky/src/services/moderation/index.ts delete mode 100644 packages/bsky/src/services/moderation/pagination.ts delete mode 100644 packages/bsky/src/services/moderation/status.ts delete mode 100644 packages/bsky/src/services/moderation/types.ts delete mode 100644 packages/bsky/src/services/moderation/views.ts delete mode 100644 services/bsky/daemon.js delete mode 100644 services/bsky/indexer.js delete mode 100644 services/bsky/ingester.js diff --git a/packages/bsky/src/api/com/atproto/admin/emitModerationEvent.ts b/packages/bsky/src/api/com/atproto/admin/emitModerationEvent.ts deleted file mode 100644 index 8b007f64ca1..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/emitModerationEvent.ts +++ /dev/null @@ -1,220 +0,0 @@ -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import { - AuthRequiredError, - InvalidRequestError, - UpstreamFailureError, -} from '@atproto/xrpc-server' -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { getSubject } from '../moderation/util' -import { - isModEventLabel, - isModEventReverseTakedown, - isModEventTakedown, -} from '../../../../lexicon/types/com/atproto/admin/defs' -import { 
TakedownSubjects } from '../../../../services/moderation' -import { retryHttp } from '../../../../util/retry' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.emitModerationEvent({ - auth: ctx.roleVerifier, - handler: async ({ input, auth }) => { - const access = auth.credentials - const db = ctx.db.getPrimary() - const moderationService = ctx.services.moderation(db) - const { subject, createdBy, subjectBlobCids, event } = input.body - const isTakedownEvent = isModEventTakedown(event) - const isReverseTakedownEvent = isModEventReverseTakedown(event) - const isLabelEvent = isModEventLabel(event) - - // apply access rules - - // if less than moderator access then can not takedown an account - if (!access.moderator && isTakedownEvent && 'did' in subject) { - throw new AuthRequiredError( - 'Must be a full moderator to perform an account takedown', - ) - } - // if less than moderator access then can only take ack and escalation actions - if (!access.moderator && (isTakedownEvent || isReverseTakedownEvent)) { - throw new AuthRequiredError( - 'Must be a full moderator to take this type of action', - ) - } - // if less than moderator access then can not apply labels - if (!access.moderator && isLabelEvent) { - throw new AuthRequiredError('Must be a full moderator to label content') - } - - if (isLabelEvent) { - validateLabels([ - ...(event.createLabelVals ?? []), - ...(event.negateLabelVals ?? 
[]), - ]) - } - - const subjectInfo = getSubject(subject) - - if (isTakedownEvent || isReverseTakedownEvent) { - const isSubjectTakendown = await moderationService.isSubjectTakendown( - subjectInfo, - ) - - if (isSubjectTakendown && isTakedownEvent) { - throw new InvalidRequestError(`Subject is already taken down`) - } - - if (!isSubjectTakendown && isReverseTakedownEvent) { - throw new InvalidRequestError(`Subject is not taken down`) - } - } - - const { result: moderationEvent, takenDown } = await db.transaction( - async (dbTxn) => { - const moderationTxn = ctx.services.moderation(dbTxn) - const labelTxn = ctx.services.label(dbTxn) - - const result = await moderationTxn.logEvent({ - event, - subject: subjectInfo, - subjectBlobCids: - subjectBlobCids?.map((cid) => CID.parse(cid)) ?? [], - createdBy, - }) - - let takenDown: TakedownSubjects | undefined - - if ( - result.subjectType === 'com.atproto.admin.defs#repoRef' && - result.subjectDid - ) { - // No credentials to revoke on appview - if (isTakedownEvent) { - takenDown = await moderationTxn.takedownRepo({ - takedownId: result.id, - did: result.subjectDid, - }) - } - - if (isReverseTakedownEvent) { - await moderationTxn.reverseTakedownRepo({ - did: result.subjectDid, - }) - takenDown = { - subjects: [ - { - $type: 'com.atproto.admin.defs#repoRef', - did: result.subjectDid, - }, - ], - did: result.subjectDid, - } - } - } - - if ( - result.subjectType === 'com.atproto.repo.strongRef' && - result.subjectUri - ) { - const blobCids = subjectBlobCids?.map((cid) => CID.parse(cid)) ?? [] - if (isTakedownEvent) { - takenDown = await moderationTxn.takedownRecord({ - takedownId: result.id, - uri: new AtUri(result.subjectUri), - // TODO: I think this will always be available for strongRefs? 
- cid: CID.parse(result.subjectCid as string), - blobCids, - }) - } - - if (isReverseTakedownEvent) { - await moderationTxn.reverseTakedownRecord({ - uri: new AtUri(result.subjectUri), - }) - takenDown = { - did: result.subjectDid, - subjects: [ - { - $type: 'com.atproto.repo.strongRef', - uri: result.subjectUri, - cid: result.subjectCid ?? '', - }, - ...blobCids.map((cid) => ({ - $type: 'com.atproto.admin.defs#repoBlobRef', - did: result.subjectDid, - cid: cid.toString(), - recordUri: result.subjectUri, - })), - ], - } - } - } - - if (isLabelEvent) { - await labelTxn.formatAndCreate( - ctx.cfg.labelerDid, - result.subjectUri ?? result.subjectDid, - result.subjectCid, - { - create: result.createLabelVals?.length - ? result.createLabelVals.split(' ') - : undefined, - negate: result.negateLabelVals?.length - ? result.negateLabelVals.split(' ') - : undefined, - }, - ) - } - - return { result, takenDown } - }, - ) - - if (takenDown && ctx.moderationPushAgent) { - const { did, subjects } = takenDown - if (did && subjects.length > 0) { - const agent = ctx.moderationPushAgent - const results = await Promise.allSettled( - subjects.map((subject) => - retryHttp(() => - agent.api.com.atproto.admin.updateSubjectStatus({ - subject, - takedown: isTakedownEvent - ? 
{ - applied: true, - ref: moderationEvent.id.toString(), - } - : { - applied: false, - }, - }), - ), - ), - ) - const hadFailure = results.some((r) => r.status === 'rejected') - if (hadFailure) { - throw new UpstreamFailureError('failed to apply action on PDS') - } - } - } - - return { - encoding: 'application/json', - body: await moderationService.views.event(moderationEvent), - } - }, - }) -} - -const validateLabels = (labels: string[]) => { - for (const label of labels) { - for (const char of badChars) { - if (label.includes(char)) { - throw new InvalidRequestError(`Invalid label: ${label}`) - } - } - } -} - -const badChars = [' ', ',', ';', `'`, `"`] diff --git a/packages/bsky/src/api/com/atproto/admin/getModerationEvent.ts b/packages/bsky/src/api/com/atproto/admin/getModerationEvent.ts deleted file mode 100644 index 347a450c727..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/getModerationEvent.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.getModerationEvent({ - auth: ctx.roleVerifier, - handler: async ({ params }) => { - const { id } = params - const db = ctx.db.getPrimary() - const moderationService = ctx.services.moderation(db) - const event = await moderationService.getEventOrThrow(id) - const eventDetail = await moderationService.views.eventDetail(event) - return { - encoding: 'application/json', - body: eventDetail, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/getRecord.ts b/packages/bsky/src/api/com/atproto/admin/getRecord.ts deleted file mode 100644 index 8e459910806..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/getRecord.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { 
addAccountInfoToRepoView, getPdsAccountInfo } from './util' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.getRecord({ - auth: ctx.roleVerifier, - handler: async ({ params, auth }) => { - const { uri, cid } = params - const db = ctx.db.getPrimary() - const result = await db.db - .selectFrom('record') - .selectAll() - .where('uri', '=', uri) - .if(!!cid, (qb) => qb.where('cid', '=', cid ?? '')) - .executeTakeFirst() - if (!result) { - throw new InvalidRequestError('Record not found', 'RecordNotFound') - } - - const [record, accountInfo] = await Promise.all([ - ctx.services.moderation(db).views.recordDetail(result), - getPdsAccountInfo(ctx, result.did), - ]) - - record.repo = addAccountInfoToRepoView( - record.repo, - accountInfo, - auth.credentials.moderator, - ) - - return { - encoding: 'application/json', - body: record, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/getRepo.ts b/packages/bsky/src/api/com/atproto/admin/getRepo.ts deleted file mode 100644 index 314b345b5e9..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/getRepo.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { addAccountInfoToRepoViewDetail, getPdsAccountInfo } from './util' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.getRepo({ - auth: ctx.roleVerifier, - handler: async ({ params, auth }) => { - const { did } = params - const db = ctx.db.getPrimary() - const result = await ctx.services.actor(db).getActor(did, true) - if (!result) { - throw new InvalidRequestError('Repo not found', 'RepoNotFound') - } - const [partialRepo, accountInfo] = await Promise.all([ - ctx.services.moderation(db).views.repoDetail(result), - getPdsAccountInfo(ctx, result.did), - ]) - - const repo = addAccountInfoToRepoViewDetail( - partialRepo, - accountInfo, - 
auth.credentials.moderator, - ) - return { - encoding: 'application/json', - body: repo, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/queryModerationEvents.ts b/packages/bsky/src/api/com/atproto/admin/queryModerationEvents.ts deleted file mode 100644 index 1868533295c..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/queryModerationEvents.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { getEventType } from '../moderation/util' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.queryModerationEvents({ - auth: ctx.roleVerifier, - handler: async ({ params }) => { - const { - subject, - limit = 50, - cursor, - sortDirection = 'desc', - types, - includeAllUserRecords = false, - createdBy, - } = params - const db = ctx.db.getPrimary() - const moderationService = ctx.services.moderation(db) - const results = await moderationService.getEvents({ - types: types?.length ? 
types.map(getEventType) : [], - subject, - createdBy, - limit, - cursor, - sortDirection, - includeAllUserRecords, - }) - return { - encoding: 'application/json', - body: { - cursor: results.cursor, - events: await moderationService.views.event(results.events), - }, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/queryModerationStatuses.ts b/packages/bsky/src/api/com/atproto/admin/queryModerationStatuses.ts deleted file mode 100644 index 5a74bfca3ae..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/queryModerationStatuses.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { getReviewState } from '../moderation/util' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.queryModerationStatuses({ - auth: ctx.roleVerifier, - handler: async ({ params }) => { - const { - subject, - takendown, - reviewState, - reviewedAfter, - reviewedBefore, - reportedAfter, - reportedBefore, - ignoreSubjects, - lastReviewedBy, - sortDirection = 'desc', - sortField = 'lastReportedAt', - includeMuted = false, - limit = 50, - cursor, - } = params - const db = ctx.db.getPrimary() - const moderationService = ctx.services.moderation(db) - const results = await moderationService.getSubjectStatuses({ - reviewState: getReviewState(reviewState), - subject, - takendown, - reviewedAfter, - reviewedBefore, - reportedAfter, - reportedBefore, - includeMuted, - ignoreSubjects, - sortDirection, - lastReviewedBy, - sortField, - limit, - cursor, - }) - const subjectStatuses = moderationService.views.subjectStatus( - results.statuses, - ) - return { - encoding: 'application/json', - body: { - cursor: results.cursor, - subjectStatuses, - }, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/searchRepos.ts b/packages/bsky/src/api/com/atproto/admin/searchRepos.ts deleted file mode 100644 index ef580f30d67..00000000000 --- 
a/packages/bsky/src/api/com/atproto/admin/searchRepos.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.admin.searchRepos({ - auth: ctx.roleVerifier, - handler: async ({ params }) => { - const db = ctx.db.getPrimary() - const moderationService = ctx.services.moderation(db) - const { limit, cursor } = params - // prefer new 'q' query param over deprecated 'term' - const query = params.q ?? params.term - - const { results, cursor: resCursor } = await ctx.services - .actor(db) - .getSearchResults({ query, limit, cursor, includeSoftDeleted: true }) - - return { - encoding: 'application/json', - body: { - cursor: resCursor, - repos: await moderationService.views.repo(results), - }, - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/admin/util.ts b/packages/bsky/src/api/com/atproto/admin/util.ts deleted file mode 100644 index 7dfd10cce5c..00000000000 --- a/packages/bsky/src/api/com/atproto/admin/util.ts +++ /dev/null @@ -1,52 +0,0 @@ -import AppContext from '../../../../context' -import { - RepoView, - RepoViewDetail, - AccountView, -} from '../../../../lexicon/types/com/atproto/admin/defs' - -export const getPdsAccountInfo = async ( - ctx: AppContext, - did: string, -): Promise => { - const agent = ctx.moderationPushAgent - if (!agent) return null - try { - const res = await agent.api.com.atproto.admin.getAccountInfo({ did }) - return res.data - } catch (err) { - return null - } -} - -export const addAccountInfoToRepoViewDetail = ( - repoView: RepoViewDetail, - accountInfo: AccountView | null, - includeEmail = false, -): RepoViewDetail => { - if (!accountInfo) return repoView - return { - ...repoView, - email: includeEmail ? 
accountInfo.email : undefined, - invitedBy: accountInfo.invitedBy, - invitesDisabled: accountInfo.invitesDisabled, - inviteNote: accountInfo.inviteNote, - invites: accountInfo.invites, - emailConfirmedAt: accountInfo.emailConfirmedAt, - } -} - -export const addAccountInfoToRepoView = ( - repoView: RepoView, - accountInfo: AccountView | null, - includeEmail = false, -): RepoView => { - if (!accountInfo) return repoView - return { - ...repoView, - email: includeEmail ? accountInfo.email : undefined, - invitedBy: accountInfo.invitedBy, - invitesDisabled: accountInfo.invitesDisabled, - inviteNote: accountInfo.inviteNote, - } -} diff --git a/packages/bsky/src/api/com/atproto/moderation/createReport.ts b/packages/bsky/src/api/com/atproto/moderation/createReport.ts deleted file mode 100644 index b247a319527..00000000000 --- a/packages/bsky/src/api/com/atproto/moderation/createReport.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { AuthRequiredError } from '@atproto/xrpc-server' -import { Server } from '../../../../lexicon' -import AppContext from '../../../../context' -import { getReasonType, getSubject } from './util' -import { softDeleted } from '../../../../db/util' - -export default function (server: Server, ctx: AppContext) { - server.com.atproto.moderation.createReport({ - // @TODO anonymous reports w/ optional auth are a temporary measure - auth: ctx.authOptionalVerifier, - handler: async ({ input, auth }) => { - const { reasonType, reason, subject } = input.body - const requester = auth.credentials.did - - const db = ctx.db.getPrimary() - - if (requester) { - // Don't accept reports from users that are fully taken-down - const actor = await ctx.services.actor(db).getActor(requester, true) - if (actor && softDeleted(actor)) { - throw new AuthRequiredError() - } - } - - const report = await db.transaction(async (dbTxn) => { - const moderationTxn = ctx.services.moderation(dbTxn) - return moderationTxn.report({ - reasonType: getReasonType(reasonType), - reason, - subject: 
getSubject(subject), - reportedBy: requester || ctx.cfg.serverDid, - }) - }) - - const moderationService = ctx.services.moderation(db) - return { - encoding: 'application/json', - body: moderationService.views.reportPublic(report), - } - }, - }) -} diff --git a/packages/bsky/src/api/com/atproto/moderation/util.ts b/packages/bsky/src/api/com/atproto/moderation/util.ts deleted file mode 100644 index bc0ece2ff9f..00000000000 --- a/packages/bsky/src/api/com/atproto/moderation/util.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { CID } from 'multiformats/cid' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { AtUri } from '@atproto/syntax' -import { InputSchema as ReportInput } from '../../../../lexicon/types/com/atproto/moderation/createReport' -import { InputSchema as ActionInput } from '../../../../lexicon/types/com/atproto/admin/emitModerationEvent' -import { - REASONOTHER, - REASONSPAM, - REASONMISLEADING, - REASONRUDE, - REASONSEXUAL, - REASONVIOLATION, -} from '../../../../lexicon/types/com/atproto/moderation/defs' -import { - REVIEWCLOSED, - REVIEWESCALATED, - REVIEWOPEN, -} from '../../../../lexicon/types/com/atproto/admin/defs' -import { ModerationEvent } from '../../../../db/tables/moderation' -import { ModerationSubjectStatusRow } from '../../../../services/moderation/types' - -type SubjectInput = ReportInput['subject'] | ActionInput['subject'] - -export const getSubject = (subject: SubjectInput) => { - if ( - subject.$type === 'com.atproto.admin.defs#repoRef' && - typeof subject.did === 'string' - ) { - return { did: subject.did } - } - if ( - subject.$type === 'com.atproto.repo.strongRef' && - typeof subject.uri === 'string' && - typeof subject.cid === 'string' - ) { - const uri = new AtUri(subject.uri) - return { - uri, - cid: CID.parse(subject.cid), - } - } - throw new InvalidRequestError('Invalid subject') -} - -export const getReasonType = (reasonType: ReportInput['reasonType']) => { - if (reasonTypes.has(reasonType)) { - return 
reasonType as NonNullable['reportType'] - } - throw new InvalidRequestError('Invalid reason type') -} - -export const getEventType = (type: string) => { - if (eventTypes.has(type)) { - return type as ModerationEvent['action'] - } - throw new InvalidRequestError('Invalid event type') -} - -export const getReviewState = (reviewState?: string) => { - if (!reviewState) return undefined - if (reviewStates.has(reviewState)) { - return reviewState as ModerationSubjectStatusRow['reviewState'] - } - throw new InvalidRequestError('Invalid review state') -} - -const reviewStates = new Set([REVIEWCLOSED, REVIEWESCALATED, REVIEWOPEN]) - -const reasonTypes = new Set([ - REASONOTHER, - REASONSPAM, - REASONMISLEADING, - REASONRUDE, - REASONSEXUAL, - REASONVIOLATION, -]) - -const eventTypes = new Set([ - 'com.atproto.admin.defs#modEventTakedown', - 'com.atproto.admin.defs#modEventAcknowledge', - 'com.atproto.admin.defs#modEventEscalate', - 'com.atproto.admin.defs#modEventComment', - 'com.atproto.admin.defs#modEventLabel', - 'com.atproto.admin.defs#modEventReport', - 'com.atproto.admin.defs#modEventMute', - 'com.atproto.admin.defs#modEventUnmute', - 'com.atproto.admin.defs#modEventReverseTakedown', - 'com.atproto.admin.defs#modEventEmail', -]) diff --git a/packages/bsky/src/auto-moderator/abyss.ts b/packages/bsky/src/auto-moderator/abyss.ts deleted file mode 100644 index 4799c7067a5..00000000000 --- a/packages/bsky/src/auto-moderator/abyss.ts +++ /dev/null @@ -1,114 +0,0 @@ -import axios from 'axios' -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import * as ui8 from 'uint8arrays' -import { resolveBlob } from '../api/blob-resolver' -import { retryHttp } from '../util/retry' -import { PrimaryDatabase } from '../db' -import { IdResolver } from '@atproto/identity' -import { labelerLogger as log } from '../logger' - -export interface ImageFlagger { - scanImage(did: string, cid: CID, uri: AtUri): Promise -} - -export class Abyss implements ImageFlagger 
{ - protected auth: string - - constructor( - public endpoint: string, - protected password: string, - public ctx: { db: PrimaryDatabase; idResolver: IdResolver }, - ) { - this.auth = basicAuth(this.password) - } - - async scanImage(did: string, cid: CID, uri: AtUri): Promise { - const start = Date.now() - const res = await retryHttp(async () => { - try { - return await this.makeReq(did, cid, uri) - } catch (err) { - log.warn({ err, did, cid: cid.toString() }, 'abyss request failed') - throw err - } - }) - log.info( - { res, did, cid: cid.toString(), duration: Date.now() - start }, - 'abyss response', - ) - return this.parseRes(res) - } - - async makeReq(did: string, cid: CID, uri: AtUri): Promise { - const { stream, contentType } = await resolveBlob( - did, - cid, - this.ctx.db, - this.ctx.idResolver, - ) - const { data } = await axios.post( - this.getReqUrl({ did, uri: uri.toString() }), - stream, - { - headers: { - 'Content-Type': contentType, - authorization: this.auth, - }, - timeout: 10000, - }, - ) - return data - } - - parseRes(res: ScannerResp): string[] { - if (!res.match || res.match.status !== 'success') { - return [] - } - const labels: string[] = [] - for (const hit of res.match.hits) { - if (TAKEDOWN_LABELS.includes(hit.label)) { - labels.push(hit.label) - } - } - return labels - } - - getReqUrl(params: { did: string; uri: string }) { - const search = new URLSearchParams(params) - return `${ - this.endpoint - }/xrpc/com.atproto.unspecced.scanBlob?${search.toString()}` - } -} - -const TAKEDOWN_LABELS = ['csam', 'csem'] - -type ScannerResp = { - blob: unknown - match?: { - status: string - hits: ScannerHit[] - } - classify?: { - hits?: unknown[] - } - review?: { - state?: string - ticketId?: string - } -} - -type ScannerHit = { - hashType: string - hashValue: string - label: string - corpus: string -} - -const basicAuth = (password: string) => { - return ( - 'Basic ' + - ui8.toString(ui8.fromString(`admin:${password}`, 'utf8'), 'base64pad') - ) -} diff 
--git a/packages/bsky/src/auto-moderator/fuzzy-matcher.ts b/packages/bsky/src/auto-moderator/fuzzy-matcher.ts deleted file mode 100644 index 07b5fb9a85e..00000000000 --- a/packages/bsky/src/auto-moderator/fuzzy-matcher.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { dedupeStrs } from '@atproto/common' -import * as ui8 from 'uint8arrays' - -export interface TextFlagger { - getMatches(string: string): string[] -} - -export class FuzzyMatcher implements TextFlagger { - private bannedWords: Set - private falsePositives: Set - - constructor(bannedWords: string[], falsePositives: string[] = []) { - this.bannedWords = new Set(bannedWords.map((word) => word.toLowerCase())) - this.falsePositives = new Set( - falsePositives.map((word) => word.toLowerCase()), - ) - } - - static fromB64(bannedB64: string, falsePositivesB64?: string) { - return new FuzzyMatcher( - decode(bannedB64), - falsePositivesB64 ? decode(falsePositivesB64) : undefined, - ) - } - - private normalize(domain: string): string[] { - const withoutSymbols = domain.replace(/[\W_]+/g, '') // Remove non-alphanumeric characters - const lowercase = withoutSymbols.toLowerCase() - - // Replace common leetspeak characters - const leetSpeakReplacements: { [key: string]: string[] } = { - '0': ['o'], - '8': ['b'], - '3': ['e'], - '4': ['a'], - '6': ['g'], - '1': ['i', 'l'], - '5': ['s'], - '7': ['t'], - } - - return this.generatePermutations(lowercase, leetSpeakReplacements) - } - - private generatePermutations( - domain: string, - leetSpeakReplacements: { [key: string]: string[] }, - ): string[] { - const results: string[] = [] - - const leetChars = Object.keys(leetSpeakReplacements) - const firstLeetCharIndex = [...domain].findIndex((char) => - leetChars.includes(char), - ) - - if (firstLeetCharIndex === -1) { - // No leetspeak characters left in the string - results.push(domain) - } else { - const char = domain[firstLeetCharIndex] - const beforeChar = domain.slice(0, firstLeetCharIndex) - const afterChar = 
domain.slice(firstLeetCharIndex + 1) - - // For each replacement, generate all possible combinations - for (const replacement of leetSpeakReplacements[char]) { - const replaced = beforeChar + replacement + afterChar - - // Recursively generate all permutations for the rest of the string - const otherPermutations = this.generatePermutations( - replaced, - leetSpeakReplacements, - ) - - // Add these permutations to the results - results.push(...otherPermutations) - } - } - - return dedupeStrs(results) - } - - public getMatches(domain: string): string[] { - const normalizedDomains = this.normalize(domain) - - const foundUnacceptableWords: string[] = [] - - for (const normalizedDomain of normalizedDomains) { - for (const word of this.bannedWords) { - const match = normalizedDomain.indexOf(word) - if (match > -1) { - let isFalsePositive = false - for (const falsePositive of this.falsePositives) { - const s_fp = falsePositive.indexOf(word) - const s_nd = match - s_fp - const wordToMatch = normalizedDomain.slice( - s_nd, - s_nd + falsePositive.length, - ) - if (wordToMatch === falsePositive) { - isFalsePositive = true - break - } - } - - if (!isFalsePositive) { - foundUnacceptableWords.push(word) - } - } - } - } - - if (foundUnacceptableWords.length > 0) { - return foundUnacceptableWords - } - - return [] - } -} - -export const decode = (encoded: string): string[] => { - return ui8.toString(ui8.fromString(encoded, 'base64'), 'utf8').split(',') -} - -export const encode = (words: string[]): string => { - return ui8.toString(ui8.fromString(words.join(','), 'utf8'), 'base64') -} diff --git a/packages/bsky/src/auto-moderator/hive.ts b/packages/bsky/src/auto-moderator/hive.ts deleted file mode 100644 index 51d67c1c783..00000000000 --- a/packages/bsky/src/auto-moderator/hive.ts +++ /dev/null @@ -1,187 +0,0 @@ -import axios from 'axios' -import FormData from 'form-data' -import { CID } from 'multiformats/cid' -import { IdResolver } from '@atproto/identity' -import { 
PrimaryDatabase } from '../db' -import { retryHttp } from '../util/retry' -import { resolveBlob } from '../api/blob-resolver' -import { labelerLogger as log } from '../logger' - -const HIVE_ENDPOINT = 'https://api.thehive.ai/api/v2/task/sync' - -export interface ImgLabeler { - labelImg(did: string, cid: CID): Promise -} - -export class HiveLabeler implements ImgLabeler { - constructor( - public hiveApiKey: string, - protected ctx: { - db: PrimaryDatabase - idResolver: IdResolver - }, - ) {} - - async labelImg(did: string, cid: CID): Promise { - const hiveRes = await retryHttp(async () => { - try { - return await this.makeHiveReq(did, cid) - } catch (err) { - log.warn({ err, did, cid: cid.toString() }, 'hive request failed') - throw err - } - }) - log.info({ hiveRes, did, cid: cid.toString() }, 'hive response') - const classes = respToClasses(hiveRes) - return summarizeLabels(classes) - } - - async makeHiveReq(did: string, cid: CID): Promise { - const { stream } = await resolveBlob( - did, - cid, - this.ctx.db, - this.ctx.idResolver, - ) - const form = new FormData() - form.append('media', stream) - const { data } = await axios.post(HIVE_ENDPOINT, form, { - headers: { - 'Content-Type': 'multipart/form-data', - authorization: `token ${this.hiveApiKey}`, - accept: 'application/json', - }, - }) - return data - } -} - -export const respToClasses = (res: HiveResp): HiveRespClass[] => { - const classes: HiveRespClass[] = [] - for (const status of res.status) { - for (const out of status.response.output) { - for (const cls of out.classes) { - classes.push(cls) - } - } - } - return classes -} - -// Matches only one (or none) of: porn, sexual, nudity -// -// porn: sexual and nudity. 
including both explicit activity or full-frontal and suggestive/intent -// sexual: sexually suggestive, not explicit; may include some forms of nudity -// nudity: non-sexual nudity (eg, artistic, possibly some photographic) -// -// hive docs/definitions: https://docs.thehive.ai/docs/sexual-content -export const sexualLabels = (classes: HiveRespClass[]): string[] => { - const scores = {} - - for (const cls of classes) { - scores[cls.class] = cls.score - } - - // first check if porn... - for (const pornClass of [ - 'yes_sexual_activity', - 'animal_genitalia_and_human', - 'yes_realistic_nsfw', - ]) { - if (scores[pornClass] >= 0.9) { - return ['porn'] - } - } - if (scores['general_nsfw'] >= 0.9) { - // special case for some anime examples - if (scores['animated_animal_genitalia'] >= 0.5) { - return ['porn'] - } - // special case for some pornographic/explicit classic drawings - if (scores['yes_undressed'] >= 0.9 && scores['yes_sexual_activity'] > 0.9) { - return ['porn'] - } - } - - // then check for sexual suggestive (which may include nudity)... - for (const sexualClass of ['yes_sexual_intent', 'yes_sex_toy']) { - if (scores[sexualClass] >= 0.9) { - return ['sexual'] - } - } - if (scores['yes_undressed'] >= 0.9) { - // special case for bondage examples - if (scores['yes_sex_toy'] > 0.75) { - return ['sexual'] - } - } - - // then non-sexual nudity... - for (const nudityClass of [ - 'yes_male_nudity', - 'yes_female_nudity', - 'yes_undressed', - ]) { - if (scores[nudityClass] >= 0.9) { - return ['nudity'] - } - } - - // then finally flag remaining "underwear" images in to sexually suggestive - // (after non-sexual content already labeled above) - for (const nudityClass of ['yes_male_underwear', 'yes_female_underwear']) { - if (scores[nudityClass] >= 0.9) { - // TODO: retaining 'underwear' label for a short time to help understand - // the impact of labeling all "underwear" as "sexual". This *will* be - // pulling in somewhat non-sexual content in to "sexual" label. 
- return ['sexual'] - } - } - - return [] -} - -// gore and violence: https://docs.thehive.ai/docs/class-descriptions-violence-gore -const labelForClass = { - very_bloody: 'gore', - human_corpse: 'corpse', - hanging: 'corpse', -} -const labelForClassLessSensitive = { - yes_self_harm: 'self-harm', -} - -export const summarizeLabels = (classes: HiveRespClass[]): string[] => { - const labels: string[] = sexualLabels(classes) - for (const cls of classes) { - if (labelForClass[cls.class] && cls.score >= 0.9) { - labels.push(labelForClass[cls.class]) - } - } - for (const cls of classes) { - if (labelForClassLessSensitive[cls.class] && cls.score >= 0.96) { - labels.push(labelForClassLessSensitive[cls.class]) - } - } - return labels -} - -type HiveResp = { - status: HiveRespStatus[] -} - -type HiveRespStatus = { - response: { - output: HiveRespOutput[] - } -} - -type HiveRespOutput = { - time: number - classes: HiveRespClass[] -} - -type HiveRespClass = { - class: string - score: number -} diff --git a/packages/bsky/src/auto-moderator/index.ts b/packages/bsky/src/auto-moderator/index.ts deleted file mode 100644 index 8925314808c..00000000000 --- a/packages/bsky/src/auto-moderator/index.ts +++ /dev/null @@ -1,302 +0,0 @@ -import { AtUri } from '@atproto/syntax' -import { AtpAgent } from '@atproto/api' -import { dedupe, getFieldsFromRecord } from './util' -import { labelerLogger as log } from '../logger' -import { PrimaryDatabase } from '../db' -import { IdResolver } from '@atproto/identity' -import { BackgroundQueue } from '../background' -import { IndexerConfig } from '../indexer/config' -import { buildBasicAuth } from '../auth' -import { CID } from 'multiformats/cid' -import { LabelService } from '../services/label' -import { ModerationService } from '../services/moderation' -import { ImageFlagger } from './abyss' -import { HiveLabeler, ImgLabeler } from './hive' -import { KeywordLabeler, TextLabeler } from './keyword' -import { ids } from '../lexicon/lexicons' -import { 
ImageUriBuilder } from '../image/uri' -import { ImageInvalidator } from '../image/invalidator' -import { Abyss } from './abyss' -import { FuzzyMatcher, TextFlagger } from './fuzzy-matcher' -import { - REASONOTHER, - REASONVIOLATION, -} from '../lexicon/types/com/atproto/moderation/defs' - -export class AutoModerator { - public pushAgent?: AtpAgent - public imageFlagger?: ImageFlagger - public textFlagger?: TextFlagger - public imgLabeler?: ImgLabeler - public textLabeler?: TextLabeler - - services: { - label: (db: PrimaryDatabase) => LabelService - moderation?: (db: PrimaryDatabase) => ModerationService - } - - constructor( - public ctx: { - db: PrimaryDatabase - idResolver: IdResolver - cfg: IndexerConfig - backgroundQueue: BackgroundQueue - imgUriBuilder?: ImageUriBuilder - imgInvalidator?: ImageInvalidator - }, - ) { - const { imgUriBuilder, imgInvalidator } = ctx - const { hiveApiKey, abyssEndpoint, abyssPassword } = ctx.cfg - this.services = { - label: LabelService.creator(null), - } - if (imgUriBuilder && imgInvalidator) { - this.services.moderation = ModerationService.creator( - imgUriBuilder, - imgInvalidator, - ) - } else { - log.error( - { imgUriBuilder, imgInvalidator }, - 'moderation service not properly configured', - ) - } - this.imgLabeler = hiveApiKey ? 
new HiveLabeler(hiveApiKey, ctx) : undefined - this.textLabeler = new KeywordLabeler(ctx.cfg.labelerKeywords) - if (abyssEndpoint && abyssPassword) { - this.imageFlagger = new Abyss(abyssEndpoint, abyssPassword, ctx) - } else { - log.error( - { abyssEndpoint, abyssPassword }, - 'abyss not properly configured', - ) - } - - if (ctx.cfg.fuzzyMatchB64) { - this.textFlagger = FuzzyMatcher.fromB64( - ctx.cfg.fuzzyMatchB64, - ctx.cfg.fuzzyFalsePositiveB64, - ) - } - - if (ctx.cfg.moderationPushUrl) { - const url = new URL(ctx.cfg.moderationPushUrl) - this.pushAgent = new AtpAgent({ service: url.origin }) - this.pushAgent.api.setHeader( - 'authorization', - buildBasicAuth(url.username, url.password), - ) - } - } - - processRecord(uri: AtUri, cid: CID, obj: unknown) { - this.ctx.backgroundQueue.add(async () => { - const { text, imgs } = getFieldsFromRecord(obj, uri) - await Promise.all([ - this.labelRecord(uri, cid, text, imgs).catch((err) => { - log.error( - { err, uri: uri.toString(), record: obj }, - 'failed to label record', - ) - }), - this.flagRecordText(uri, cid, text).catch((err) => { - log.error( - { err, uri: uri.toString(), record: obj }, - 'failed to check record for text flagging', - ) - }), - this.checkImgForTakedown(uri, cid, imgs).catch((err) => { - log.error( - { err, uri: uri.toString(), record: obj }, - 'failed to check img for takedown', - ) - }), - ]) - }) - } - - processHandle(handle: string, did: string) { - this.ctx.backgroundQueue.add(async () => { - await this.flagSubjectText(handle, { did }).catch((err) => { - log.error({ err, handle, did }, 'failed to label handle') - }) - }) - } - - async labelRecord(uri: AtUri, recordCid: CID, text: string[], imgs: CID[]) { - if (uri.collection !== ids.AppBskyFeedPost) { - // @TODO label profiles - return - } - const allLabels = await Promise.all([ - this.textLabeler?.labelText(text.join(' ')), - ...imgs.map((cid) => this.imgLabeler?.labelImg(uri.host, cid)), - ]) - const labels = dedupe(allLabels.flat()) - 
await this.storeLabels(uri, recordCid, labels) - } - - async flagRecordText(uri: AtUri, cid: CID, text: string[]) { - if ( - ![ - ids.AppBskyActorProfile, - ids.AppBskyGraphList, - ids.AppBskyFeedGenerator, - ].includes(uri.collection) - ) { - return - } - return this.flagSubjectText(text.join(' '), { uri, cid }) - } - - async flagSubjectText( - text: string, - subject: { did: string } | { uri: AtUri; cid: CID }, - ) { - if (!this.textFlagger) return - const matches = this.textFlagger.getMatches(text) - if (matches.length < 1) return - await this.ctx.db.transaction(async (dbTxn) => { - if (!this.services.moderation) { - log.error( - { subject, text, matches }, - 'no moderation service setup to flag record text', - ) - return - } - return this.services.moderation(dbTxn).report({ - reasonType: REASONOTHER, - reason: `Automatically flagged for possible slurs: ${matches.join( - ', ', - )}`, - subject, - reportedBy: this.ctx.cfg.labelerDid, - }) - }) - } - - async checkImgForTakedown(uri: AtUri, recordCid: CID, imgCids: CID[]) { - if (imgCids.length < 0) return - const results = await Promise.all( - imgCids.map((cid) => this.imageFlagger?.scanImage(uri.host, cid, uri)), - ) - const takedownCids: CID[] = [] - for (let i = 0; i < results.length; i++) { - if (results.at(i)?.length) { - takedownCids.push(imgCids[i]) - } - } - if (takedownCids.length === 0) return - try { - await this.persistTakedown( - uri, - recordCid, - takedownCids, - dedupe(results.flat()), - ) - } catch (err) { - log.error( - { - err, - uri: uri.toString(), - imgCids: imgCids.map((c) => c.toString()), - results, - }, - 'failed to persist takedown', - ) - } - } - - async persistTakedown( - uri: AtUri, - recordCid: CID, - takedownCids: CID[], - labels: string[], - ) { - const reportReason = `automated takedown (${labels.join( - ', ', - )}). 
account needs review and possibly additional action` - const takedownReason = `automated takedown for labels: ${labels.join(', ')}` - log.warn( - { - uri: uri.toString(), - blobCids: takedownCids, - labels, - }, - 'hard takedown of record (and blobs) based on auto-matching', - ) - - if (this.services.moderation) { - await this.ctx.db.transaction(async (dbTxn) => { - // directly/locally create report, even if we use pushAgent for the takedown. don't have acctual account credentials for pushAgent, only admin auth - if (!this.services.moderation) { - // checked above, outside the transaction - return - } - const modSrvc = this.services.moderation(dbTxn) - await modSrvc.report({ - reportedBy: this.ctx.cfg.labelerDid, - reasonType: REASONVIOLATION, - subject: { - uri: uri, - cid: recordCid, - }, - reason: reportReason, - }) - }) - } - - if (this.pushAgent) { - await this.pushAgent.com.atproto.admin.emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventTakedown', - comment: takedownReason, - }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: uri.toString(), - cid: recordCid.toString(), - }, - subjectBlobCids: takedownCids.map((c) => c.toString()), - createdBy: this.ctx.cfg.labelerDid, - }) - } else { - await this.ctx.db.transaction(async (dbTxn) => { - if (!this.services.moderation) { - throw new Error('no mod push agent or uri invalidator setup') - } - const modSrvc = this.services.moderation(dbTxn) - const action = await modSrvc.logEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventTakedown', - comment: takedownReason, - }, - subject: { uri, cid: recordCid }, - subjectBlobCids: takedownCids, - createdBy: this.ctx.cfg.labelerDid, - }) - await modSrvc.takedownRecord({ - takedownId: action.id, - uri: uri, - cid: recordCid, - blobCids: takedownCids, - }) - }) - } - } - - async storeLabels(uri: AtUri, cid: CID, labels: string[]): Promise { - if (labels.length < 1) return - const labelSrvc = this.services.label(this.ctx.db) - await 
labelSrvc.formatAndCreate( - this.ctx.cfg.labelerDid, - uri.toString(), - cid.toString(), - { create: labels }, - ) - } - - async processAll() { - await this.ctx.backgroundQueue.processAll() - } -} diff --git a/packages/bsky/src/auto-moderator/keyword.ts b/packages/bsky/src/auto-moderator/keyword.ts deleted file mode 100644 index 6bc504aa142..00000000000 --- a/packages/bsky/src/auto-moderator/keyword.ts +++ /dev/null @@ -1,25 +0,0 @@ -export interface TextLabeler { - labelText(text: string): Promise -} - -export class KeywordLabeler implements TextLabeler { - constructor(public keywords: Record) {} - - async labelText(text: string): Promise { - return keywordLabeling(this.keywords, text) - } -} - -export const keywordLabeling = ( - keywords: Record, - text: string, -): string[] => { - const lowerText = text.toLowerCase() - const labels: string[] = [] - for (const word of Object.keys(keywords)) { - if (lowerText.includes(word)) { - labels.push(keywords[word]) - } - } - return labels -} diff --git a/packages/bsky/src/auto-moderator/util.ts b/packages/bsky/src/auto-moderator/util.ts deleted file mode 100644 index ab1467a07f2..00000000000 --- a/packages/bsky/src/auto-moderator/util.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import * as lex from '../lexicon/lexicons' -import { - isRecord as isPost, - Record as PostRecord, -} from '../lexicon/types/app/bsky/feed/post' -import { - isRecord as isProfile, - Record as ProfileRecord, -} from '../lexicon/types/app/bsky/actor/profile' -import { - isRecord as isList, - Record as ListRecord, -} from '../lexicon/types/app/bsky/graph/list' -import { - isRecord as isGenerator, - Record as GeneratorRecord, -} from '../lexicon/types/app/bsky/feed/generator' -import { isMain as isEmbedImage } from '../lexicon/types/app/bsky/embed/images' -import { isMain as isEmbedExternal } from '../lexicon/types/app/bsky/embed/external' -import { isMain as 
isEmbedRecordWithMedia } from '../lexicon/types/app/bsky/embed/recordWithMedia' - -type RecordFields = { - text: string[] - imgs: CID[] -} - -export const getFieldsFromRecord = ( - record: unknown, - uri: AtUri, -): RecordFields => { - if (isPost(record)) { - return getFieldsFromPost(record) - } else if (isProfile(record)) { - return getFieldsFromProfile(record) - } else if (isList(record)) { - return getFieldsFromList(record) - } else if (isGenerator(record)) { - return getFieldsFromGenerator(record, uri) - } else { - return { text: [], imgs: [] } - } -} - -export const getFieldsFromPost = (record: PostRecord): RecordFields => { - const text: string[] = [] - const imgs: CID[] = [] - text.push(record.text) - const embeds = separateEmbeds(record.embed) - for (const embed of embeds) { - if (isEmbedImage(embed)) { - for (const img of embed.images) { - imgs.push(img.image.ref) - text.push(img.alt) - } - } else if (isEmbedExternal(embed)) { - if (embed.external.thumb) { - imgs.push(embed.external.thumb.ref) - } - text.push(embed.external.title) - text.push(embed.external.description) - } - } - return { text, imgs } -} - -export const getFieldsFromProfile = (record: ProfileRecord): RecordFields => { - const text: string[] = [] - const imgs: CID[] = [] - if (record.displayName) { - text.push(record.displayName) - } - if (record.description) { - text.push(record.description) - } - if (record.avatar) { - imgs.push(record.avatar.ref) - } - if (record.banner) { - imgs.push(record.banner.ref) - } - return { text, imgs } -} - -export const getFieldsFromList = (record: ListRecord): RecordFields => { - const text: string[] = [] - const imgs: CID[] = [] - if (record.name) { - text.push(record.name) - } - if (record.description) { - text.push(record.description) - } - if (record.avatar) { - imgs.push(record.avatar.ref) - } - return { text, imgs } -} - -export const getFieldsFromGenerator = ( - record: GeneratorRecord, - uri: AtUri, -): RecordFields => { - const text: string[] = [] 
- const imgs: CID[] = [] - text.push(uri.rkey) - if (record.displayName) { - text.push(record.displayName) - } - if (record.description) { - text.push(record.description) - } - if (record.avatar) { - imgs.push(record.avatar.ref) - } - return { text, imgs } -} - -export const dedupe = (strs: (string | undefined)[]): string[] => { - const set = new Set() - for (const str of strs) { - if (str !== undefined) { - set.add(str) - } - } - return [...set] -} - -const separateEmbeds = (embed: PostRecord['embed']) => { - if (!embed) { - return [] - } - if (isEmbedRecordWithMedia(embed)) { - return [{ $type: lex.ids.AppBskyEmbedRecord, ...embed.record }, embed.media] - } - return [embed] -} diff --git a/packages/bsky/src/daemon/config.ts b/packages/bsky/src/daemon/config.ts deleted file mode 100644 index e0e789203e4..00000000000 --- a/packages/bsky/src/daemon/config.ts +++ /dev/null @@ -1,50 +0,0 @@ -import assert from 'assert' - -export interface DaemonConfigValues { - version: string - dbPostgresUrl: string - dbPostgresSchema?: string -} - -export class DaemonConfig { - constructor(private cfg: DaemonConfigValues) {} - - static readEnv(overrides?: Partial) { - const version = process.env.BSKY_VERSION || '0.0.0' - const dbPostgresUrl = - overrides?.dbPostgresUrl || process.env.DB_PRIMARY_POSTGRES_URL - const dbPostgresSchema = - overrides?.dbPostgresSchema || process.env.DB_POSTGRES_SCHEMA - assert(dbPostgresUrl) - return new DaemonConfig({ - version, - dbPostgresUrl, - dbPostgresSchema, - ...stripUndefineds(overrides ?? 
{}), - }) - } - - get version() { - return this.cfg.version - } - - get dbPostgresUrl() { - return this.cfg.dbPostgresUrl - } - - get dbPostgresSchema() { - return this.cfg.dbPostgresSchema - } -} - -function stripUndefineds( - obj: Record, -): Record { - const result = {} - Object.entries(obj).forEach(([key, val]) => { - if (val !== undefined) { - result[key] = val - } - }) - return result -} diff --git a/packages/bsky/src/daemon/context.ts b/packages/bsky/src/daemon/context.ts deleted file mode 100644 index dd3d5c1114f..00000000000 --- a/packages/bsky/src/daemon/context.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { PrimaryDatabase } from '../db' -import { DaemonConfig } from './config' -import { Services } from './services' - -export class DaemonContext { - constructor( - private opts: { - db: PrimaryDatabase - cfg: DaemonConfig - services: Services - }, - ) {} - - get db(): PrimaryDatabase { - return this.opts.db - } - - get cfg(): DaemonConfig { - return this.opts.cfg - } - - get services(): Services { - return this.opts.services - } -} - -export default DaemonContext diff --git a/packages/bsky/src/daemon/index.ts b/packages/bsky/src/daemon/index.ts deleted file mode 100644 index 61bcd8568f4..00000000000 --- a/packages/bsky/src/daemon/index.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { PrimaryDatabase } from '../db' -import { dbLogger } from '../logger' -import { DaemonConfig } from './config' -import { DaemonContext } from './context' -import { createServices } from './services' -import { ImageUriBuilder } from '../image/uri' -import { LabelCache } from '../label-cache' -import { NotificationsDaemon } from './notifications' -import logger from './logger' - -export { DaemonConfig } from './config' -export type { DaemonConfigValues } from './config' - -export class BskyDaemon { - public ctx: DaemonContext - public notifications: NotificationsDaemon - private dbStatsInterval: NodeJS.Timer - private notifStatsInterval: NodeJS.Timer - - constructor(opts: { - ctx: 
DaemonContext - notifications: NotificationsDaemon - }) { - this.ctx = opts.ctx - this.notifications = opts.notifications - } - - static create(opts: { db: PrimaryDatabase; cfg: DaemonConfig }): BskyDaemon { - const { db, cfg } = opts - const imgUriBuilder = new ImageUriBuilder('https://daemon.invalid') // will not be used by daemon - const labelCache = new LabelCache(db) - const services = createServices({ - imgUriBuilder, - labelCache, - }) - const ctx = new DaemonContext({ - db, - cfg, - services, - }) - const notifications = new NotificationsDaemon(ctx) - return new BskyDaemon({ ctx, notifications }) - } - - async start() { - const { db } = this.ctx - const pool = db.pool - this.notifications.run() - this.dbStatsInterval = setInterval(() => { - dbLogger.info( - { - idleCount: pool.idleCount, - totalCount: pool.totalCount, - waitingCount: pool.waitingCount, - }, - 'db pool stats', - ) - }, 10000) - this.notifStatsInterval = setInterval(() => { - logger.info( - { - count: this.notifications.count, - lastDid: this.notifications.lastDid, - }, - 'notifications daemon stats', - ) - }, 10000) - return this - } - - async destroy(): Promise { - await this.notifications.destroy() - await this.ctx.db.close() - clearInterval(this.dbStatsInterval) - clearInterval(this.notifStatsInterval) - } -} - -export default BskyDaemon diff --git a/packages/bsky/src/daemon/logger.ts b/packages/bsky/src/daemon/logger.ts deleted file mode 100644 index 8599acc315e..00000000000 --- a/packages/bsky/src/daemon/logger.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { subsystemLogger } from '@atproto/common' - -const logger: ReturnType = - subsystemLogger('bsky:daemon') - -export default logger diff --git a/packages/bsky/src/daemon/notifications.ts b/packages/bsky/src/daemon/notifications.ts deleted file mode 100644 index e8e884b37c2..00000000000 --- a/packages/bsky/src/daemon/notifications.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { tidyNotifications } from '../services/util/notification' -import 
DaemonContext from './context' -import logger from './logger' - -export class NotificationsDaemon { - ac = new AbortController() - running: Promise | undefined - count = 0 - lastDid: string | null = null - - constructor(private ctx: DaemonContext) {} - - run(opts?: RunOptions) { - if (this.running) return - this.count = 0 - this.lastDid = null - this.ac = new AbortController() - this.running = this.tidyNotifications({ - ...opts, - forever: opts?.forever !== false, // run forever by default - }) - .catch((err) => { - // allow this to cause an unhandled rejection, let deployment handle the crash. - logger.error({ err }, 'notifications daemon crashed') - throw err - }) - .finally(() => (this.running = undefined)) - } - - private async tidyNotifications(opts: RunOptions) { - const actorService = this.ctx.services.actor(this.ctx.db) - for await (const { did } of actorService.all(opts)) { - if (this.ac.signal.aborted) return - try { - await tidyNotifications(this.ctx.db, did) - this.count++ - this.lastDid = did - } catch (err) { - logger.warn({ err, did }, 'failed to tidy notifications for actor') - } - } - } - - async destroy() { - this.ac.abort() - await this.running - } -} - -type RunOptions = { forever?: boolean; batchSize?: number } diff --git a/packages/bsky/src/daemon/services.ts b/packages/bsky/src/daemon/services.ts deleted file mode 100644 index a4e7935523c..00000000000 --- a/packages/bsky/src/daemon/services.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { PrimaryDatabase } from '../db' -import { ActorService } from '../services/actor' -import { ImageUriBuilder } from '../image/uri' -import { LabelCache } from '../label-cache' - -export function createServices(resources: { - imgUriBuilder: ImageUriBuilder - labelCache: LabelCache -}): Services { - const { imgUriBuilder, labelCache } = resources - return { - actor: ActorService.creator(imgUriBuilder, labelCache), - } -} - -export type Services = { - actor: FromDbPrimary -} - -type FromDbPrimary = (db: 
PrimaryDatabase) => T diff --git a/packages/bsky/src/db/periodic-moderation-event-reversal.ts b/packages/bsky/src/db/periodic-moderation-event-reversal.ts deleted file mode 100644 index 9937c113d59..00000000000 --- a/packages/bsky/src/db/periodic-moderation-event-reversal.ts +++ /dev/null @@ -1,125 +0,0 @@ -import { wait } from '@atproto/common' -import { Leader } from './leader' -import { dbLogger } from '../logger' -import AppContext from '../context' -import { AtUri } from '@atproto/api' -import { ModerationSubjectStatusRow } from '../services/moderation/types' -import { CID } from 'multiformats/cid' -import AtpAgent from '@atproto/api' -import { retryHttp } from '../util/retry' - -export const MODERATION_ACTION_REVERSAL_ID = 1011 - -export class PeriodicModerationEventReversal { - leader = new Leader( - MODERATION_ACTION_REVERSAL_ID, - this.appContext.db.getPrimary(), - ) - destroyed = false - pushAgent?: AtpAgent - - constructor(private appContext: AppContext) { - this.pushAgent = appContext.moderationPushAgent - } - - async revertState(eventRow: ModerationSubjectStatusRow) { - await this.appContext.db.getPrimary().transaction(async (dbTxn) => { - const moderationTxn = this.appContext.services.moderation(dbTxn) - const originalEvent = - await moderationTxn.getLastReversibleEventForSubject(eventRow) - if (originalEvent) { - const { restored } = await moderationTxn.revertState({ - action: originalEvent.action, - createdBy: originalEvent.createdBy, - comment: - '[SCHEDULED_REVERSAL] Reverting action as originally scheduled', - subject: - eventRow.recordPath && eventRow.recordCid - ? 
{ - uri: AtUri.make( - eventRow.did, - ...eventRow.recordPath.split('/'), - ), - cid: CID.parse(eventRow.recordCid), - } - : { did: eventRow.did }, - createdAt: new Date(), - }) - - const { pushAgent } = this - if ( - originalEvent.action === 'com.atproto.admin.defs#modEventTakedown' && - restored?.subjects?.length && - pushAgent - ) { - await Promise.allSettled( - restored.subjects.map((subject) => - retryHttp(() => - pushAgent.api.com.atproto.admin.updateSubjectStatus({ - subject, - takedown: { - applied: false, - }, - }), - ), - ), - ) - } - } - }) - } - - async findAndRevertDueActions() { - const moderationService = this.appContext.services.moderation( - this.appContext.db.getPrimary(), - ) - const subjectsDueForReversal = - await moderationService.getSubjectsDueForReversal() - - // We shouldn't have too many actions due for reversal at any given time, so running in parallel is probably fine - // Internally, each reversal runs within its own transaction - await Promise.all(subjectsDueForReversal.map(this.revertState.bind(this))) - } - - async run() { - while (!this.destroyed) { - try { - const { ran } = await this.leader.run(async ({ signal }) => { - while (!signal.aborted) { - // super basic synchronization by agreeing when the intervals land relative to unix timestamp - const now = Date.now() - const intervalMs = 1000 * 60 - const nextIteration = Math.ceil(now / intervalMs) - const nextInMs = nextIteration * intervalMs - now - await wait(nextInMs) - if (signal.aborted) break - await this.findAndRevertDueActions() - } - }) - if (ran && !this.destroyed) { - throw new Error('View maintainer completed, but should be persistent') - } - } catch (err) { - dbLogger.error( - { - err, - lockId: MODERATION_ACTION_REVERSAL_ID, - }, - 'moderation action reversal errored', - ) - } - if (!this.destroyed) { - await wait(10000 + jitter(2000)) - } - } - } - - destroy() { - this.destroyed = true - this.leader.destroy() - } -} - -function jitter(maxMs) { - return 
Math.round((Math.random() - 0.5) * maxMs * 2) -} diff --git a/packages/bsky/src/indexer/config.ts b/packages/bsky/src/indexer/config.ts deleted file mode 100644 index dd8b9ab89d5..00000000000 --- a/packages/bsky/src/indexer/config.ts +++ /dev/null @@ -1,263 +0,0 @@ -import assert from 'assert' -import { DAY, HOUR, parseIntWithFallback } from '@atproto/common' - -export interface IndexerConfigValues { - version: string - dbPostgresUrl: string - dbPostgresSchema?: string - redisHost?: string // either set redis host, or both sentinel name and hosts - redisSentinelName?: string - redisSentinelHosts?: string[] - redisPassword?: string - didPlcUrl: string - didCacheStaleTTL: number - didCacheMaxTTL: number - handleResolveNameservers?: string[] - labelerDid: string - hiveApiKey?: string - abyssEndpoint?: string - abyssPassword?: string - imgUriEndpoint?: string - fuzzyMatchB64?: string - fuzzyFalsePositiveB64?: string - labelerKeywords: Record - moderationPushUrl?: string - indexerConcurrency?: number - indexerPartitionIds: number[] - indexerPartitionBatchSize?: number - indexerSubLockId?: number - indexerPort?: number - ingesterPartitionCount: number - indexerNamespace?: string - pushNotificationEndpoint?: string -} - -export class IndexerConfig { - constructor(private cfg: IndexerConfigValues) {} - - static readEnv(overrides?: Partial) { - const version = process.env.BSKY_VERSION || '0.0.0' - const dbPostgresUrl = - overrides?.dbPostgresUrl || process.env.DB_PRIMARY_POSTGRES_URL - const dbPostgresSchema = - overrides?.dbPostgresSchema || process.env.DB_POSTGRES_SCHEMA - const redisHost = - overrides?.redisHost || process.env.REDIS_HOST || undefined - const redisSentinelName = - overrides?.redisSentinelName || - process.env.REDIS_SENTINEL_NAME || - undefined - const redisSentinelHosts = - overrides?.redisSentinelHosts || - (process.env.REDIS_SENTINEL_HOSTS - ? 
process.env.REDIS_SENTINEL_HOSTS.split(',') - : []) - const redisPassword = - overrides?.redisPassword || process.env.REDIS_PASSWORD || undefined - const didPlcUrl = process.env.DID_PLC_URL || 'http://localhost:2582' - const didCacheStaleTTL = parseIntWithFallback( - process.env.DID_CACHE_STALE_TTL, - HOUR, - ) - const didCacheMaxTTL = parseIntWithFallback( - process.env.DID_CACHE_MAX_TTL, - DAY, - ) - const handleResolveNameservers = process.env.HANDLE_RESOLVE_NAMESERVERS - ? process.env.HANDLE_RESOLVE_NAMESERVERS.split(',') - : [] - const labelerDid = process.env.LABELER_DID || 'did:example:labeler' - const moderationPushUrl = - overrides?.moderationPushUrl || - process.env.MODERATION_PUSH_URL || - undefined - const hiveApiKey = process.env.HIVE_API_KEY || undefined - const abyssEndpoint = process.env.ABYSS_ENDPOINT - const abyssPassword = process.env.ABYSS_PASSWORD - const imgUriEndpoint = process.env.IMG_URI_ENDPOINT - const indexerPartitionIds = - overrides?.indexerPartitionIds || - (process.env.INDEXER_PARTITION_IDS - ? process.env.INDEXER_PARTITION_IDS.split(',').map((n) => - parseInt(n, 10), - ) - : []) - const indexerPartitionBatchSize = maybeParseInt( - process.env.INDEXER_PARTITION_BATCH_SIZE, - ) - const indexerConcurrency = maybeParseInt(process.env.INDEXER_CONCURRENCY) - const indexerNamespace = overrides?.indexerNamespace - const indexerSubLockId = maybeParseInt(process.env.INDEXER_SUB_LOCK_ID) - const indexerPort = maybeParseInt(process.env.INDEXER_PORT) - const ingesterPartitionCount = - maybeParseInt(process.env.INGESTER_PARTITION_COUNT) ?? 
64 - const labelerKeywords = {} - const fuzzyMatchB64 = process.env.FUZZY_MATCH_B64 || undefined - const fuzzyFalsePositiveB64 = - process.env.FUZZY_FALSE_POSITIVE_B64 || undefined - const pushNotificationEndpoint = process.env.PUSH_NOTIFICATION_ENDPOINT - assert(dbPostgresUrl) - assert(redisHost || (redisSentinelName && redisSentinelHosts?.length)) - assert(indexerPartitionIds.length > 0) - return new IndexerConfig({ - version, - dbPostgresUrl, - dbPostgresSchema, - redisHost, - redisSentinelName, - redisSentinelHosts, - redisPassword, - didPlcUrl, - didCacheStaleTTL, - didCacheMaxTTL, - handleResolveNameservers, - labelerDid, - moderationPushUrl, - hiveApiKey, - abyssEndpoint, - abyssPassword, - imgUriEndpoint, - indexerPartitionIds, - indexerConcurrency, - indexerPartitionBatchSize, - indexerNamespace, - indexerSubLockId, - indexerPort, - ingesterPartitionCount, - labelerKeywords, - fuzzyMatchB64, - fuzzyFalsePositiveB64, - pushNotificationEndpoint, - ...stripUndefineds(overrides ?? {}), - }) - } - - get version() { - return this.cfg.version - } - - get dbPostgresUrl() { - return this.cfg.dbPostgresUrl - } - - get dbPostgresSchema() { - return this.cfg.dbPostgresSchema - } - - get redisHost() { - return this.cfg.redisHost - } - - get redisSentinelName() { - return this.cfg.redisSentinelName - } - - get redisSentinelHosts() { - return this.cfg.redisSentinelHosts - } - - get redisPassword() { - return this.cfg.redisPassword - } - - get didPlcUrl() { - return this.cfg.didPlcUrl - } - - get didCacheStaleTTL() { - return this.cfg.didCacheStaleTTL - } - - get didCacheMaxTTL() { - return this.cfg.didCacheMaxTTL - } - - get handleResolveNameservers() { - return this.cfg.handleResolveNameservers - } - - get labelerDid() { - return this.cfg.labelerDid - } - - get moderationPushUrl() { - return this.cfg.moderationPushUrl - } - - get hiveApiKey() { - return this.cfg.hiveApiKey - } - - get abyssEndpoint() { - return this.cfg.abyssEndpoint - } - - get abyssPassword() { - 
return this.cfg.abyssPassword - } - - get imgUriEndpoint() { - return this.cfg.imgUriEndpoint - } - - get indexerConcurrency() { - return this.cfg.indexerConcurrency - } - - get indexerPartitionIds() { - return this.cfg.indexerPartitionIds - } - - get indexerPartitionBatchSize() { - return this.cfg.indexerPartitionBatchSize - } - - get indexerNamespace() { - return this.cfg.indexerNamespace - } - - get indexerSubLockId() { - return this.cfg.indexerSubLockId - } - - get indexerPort() { - return this.cfg.indexerPort - } - - get ingesterPartitionCount() { - return this.cfg.ingesterPartitionCount - } - - get labelerKeywords() { - return this.cfg.labelerKeywords - } - - get fuzzyMatchB64() { - return this.cfg.fuzzyMatchB64 - } - - get fuzzyFalsePositiveB64() { - return this.cfg.fuzzyFalsePositiveB64 - } - - get pushNotificationEndpoint() { - return this.cfg.pushNotificationEndpoint - } -} - -function stripUndefineds( - obj: Record, -): Record { - const result = {} - Object.entries(obj).forEach(([key, val]) => { - if (val !== undefined) { - result[key] = val - } - }) - return result -} - -function maybeParseInt(str) { - const parsed = parseInt(str) - return isNaN(parsed) ? 
undefined : parsed -} diff --git a/packages/bsky/src/indexer/context.ts b/packages/bsky/src/indexer/context.ts deleted file mode 100644 index e7fe24580fa..00000000000 --- a/packages/bsky/src/indexer/context.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { IdResolver } from '@atproto/identity' -import { PrimaryDatabase } from '../db' -import { IndexerConfig } from './config' -import { Services } from './services' -import { BackgroundQueue } from '../background' -import DidSqlCache from '../did-cache' -import { Redis } from '../redis' -import { AutoModerator } from '../auto-moderator' - -export class IndexerContext { - constructor( - private opts: { - db: PrimaryDatabase - redis: Redis - cfg: IndexerConfig - services: Services - idResolver: IdResolver - didCache: DidSqlCache - backgroundQueue: BackgroundQueue - autoMod: AutoModerator - }, - ) {} - - get db(): PrimaryDatabase { - return this.opts.db - } - - get redis(): Redis { - return this.opts.redis - } - - get cfg(): IndexerConfig { - return this.opts.cfg - } - - get services(): Services { - return this.opts.services - } - - get idResolver(): IdResolver { - return this.opts.idResolver - } - - get didCache(): DidSqlCache { - return this.opts.didCache - } - - get backgroundQueue(): BackgroundQueue { - return this.opts.backgroundQueue - } - - get autoMod(): AutoModerator { - return this.opts.autoMod - } -} - -export default IndexerContext diff --git a/packages/bsky/src/indexer/index.ts b/packages/bsky/src/indexer/index.ts deleted file mode 100644 index ed8188d353b..00000000000 --- a/packages/bsky/src/indexer/index.ts +++ /dev/null @@ -1,148 +0,0 @@ -import express from 'express' -import { IdResolver } from '@atproto/identity' -import { BackgroundQueue } from '../background' -import { PrimaryDatabase } from '../db' -import DidSqlCache from '../did-cache' -import log from './logger' -import { dbLogger } from '../logger' -import { IndexerConfig } from './config' -import { IndexerContext } from './context' -import { 
createServices } from './services' -import { IndexerSubscription } from './subscription' -import { AutoModerator } from '../auto-moderator' -import { Redis } from '../redis' -import { NotificationServer } from '../notifications' -import { CloseFn, createServer, startServer } from './server' -import { ImageUriBuilder } from '../image/uri' -import { ImageInvalidator } from '../image/invalidator' - -export { IndexerConfig } from './config' -export type { IndexerConfigValues } from './config' - -export class BskyIndexer { - public ctx: IndexerContext - public sub: IndexerSubscription - public app: express.Application - private closeServer?: CloseFn - private dbStatsInterval: NodeJS.Timer - private subStatsInterval: NodeJS.Timer - - constructor(opts: { - ctx: IndexerContext - sub: IndexerSubscription - app: express.Application - }) { - this.ctx = opts.ctx - this.sub = opts.sub - this.app = opts.app - } - - static create(opts: { - db: PrimaryDatabase - redis: Redis - cfg: IndexerConfig - imgInvalidator?: ImageInvalidator - }): BskyIndexer { - const { db, redis, cfg } = opts - const didCache = new DidSqlCache( - db, - cfg.didCacheStaleTTL, - cfg.didCacheMaxTTL, - ) - const idResolver = new IdResolver({ - plcUrl: cfg.didPlcUrl, - didCache, - backupNameservers: cfg.handleResolveNameservers, - }) - const backgroundQueue = new BackgroundQueue(db) - - const imgUriBuilder = cfg.imgUriEndpoint - ? new ImageUriBuilder(cfg.imgUriEndpoint) - : undefined - const imgInvalidator = opts.imgInvalidator - const autoMod = new AutoModerator({ - db, - idResolver, - cfg, - backgroundQueue, - imgUriBuilder, - imgInvalidator, - }) - - const notifServer = cfg.pushNotificationEndpoint - ? 
new NotificationServer(db, cfg.pushNotificationEndpoint) - : undefined - const services = createServices({ - idResolver, - autoMod, - backgroundQueue, - notifServer, - }) - const ctx = new IndexerContext({ - db, - redis, - cfg, - services, - idResolver, - didCache, - backgroundQueue, - autoMod, - }) - const sub = new IndexerSubscription(ctx, { - partitionIds: cfg.indexerPartitionIds, - partitionBatchSize: cfg.indexerPartitionBatchSize, - concurrency: cfg.indexerConcurrency, - subLockId: cfg.indexerSubLockId, - }) - - const app = createServer(sub, cfg) - - return new BskyIndexer({ ctx, sub, app }) - } - - async start() { - const { db, backgroundQueue } = this.ctx - const pool = db.pool - this.dbStatsInterval = setInterval(() => { - dbLogger.info( - { - idleCount: pool.idleCount, - totalCount: pool.totalCount, - waitingCount: pool.waitingCount, - }, - 'db pool stats', - ) - dbLogger.info( - { - runningCount: backgroundQueue.queue.pending, - waitingCount: backgroundQueue.queue.size, - }, - 'background queue stats', - ) - }, 10000) - this.subStatsInterval = setInterval(() => { - log.info( - { - processedCount: this.sub.processedCount, - runningCount: this.sub.repoQueue.main.pending, - waitingCount: this.sub.repoQueue.main.size, - }, - 'indexer stats', - ) - }, 500) - this.sub.run() - this.closeServer = startServer(this.app, this.ctx.cfg.indexerPort) - return this - } - - async destroy(opts?: { skipDb: boolean; skipRedis: true }): Promise { - if (this.closeServer) await this.closeServer() - await this.sub.destroy() - clearInterval(this.subStatsInterval) - if (!opts?.skipRedis) await this.ctx.redis.destroy() - if (!opts?.skipDb) await this.ctx.db.close() - clearInterval(this.dbStatsInterval) - } -} - -export default BskyIndexer diff --git a/packages/bsky/src/indexer/logger.ts b/packages/bsky/src/indexer/logger.ts deleted file mode 100644 index 45752727f99..00000000000 --- a/packages/bsky/src/indexer/logger.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { subsystemLogger } from 
'@atproto/common' - -const logger: ReturnType = - subsystemLogger('bsky:indexer') - -export default logger diff --git a/packages/bsky/src/indexer/server.ts b/packages/bsky/src/indexer/server.ts deleted file mode 100644 index dfafb741eb4..00000000000 --- a/packages/bsky/src/indexer/server.ts +++ /dev/null @@ -1,46 +0,0 @@ -import express from 'express' -import { IndexerSubscription } from './subscription' -import { IndexerConfig } from './config' -import { randomIntFromSeed } from '@atproto/crypto' - -export type CloseFn = () => Promise - -export const createServer = ( - sub: IndexerSubscription, - cfg: IndexerConfig, -): express.Application => { - const app = express() - app.post('/reprocess/:did', async (req, res) => { - const did = req.params.did - try { - const partition = await randomIntFromSeed(did, cfg.ingesterPartitionCount) - const supportedPartition = cfg.indexerPartitionIds.includes(partition) - if (!supportedPartition) { - return res.status(400).send(`unsupported partition: ${partition}`) - } - } catch (err) { - return res.status(500).send('could not calculate partition') - } - await sub.requestReprocess(req.params.did) - res.sendStatus(200) - }) - return app -} - -export const startServer = ( - app: express.Application, - port?: number, -): CloseFn => { - const server = app.listen(port) - return () => { - return new Promise((resolve, reject) => { - server.close((err) => { - if (err) { - reject(err) - } else { - resolve() - } - }) - }) - } -} diff --git a/packages/bsky/src/indexer/services.ts b/packages/bsky/src/indexer/services.ts deleted file mode 100644 index df173352046..00000000000 --- a/packages/bsky/src/indexer/services.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { IdResolver } from '@atproto/identity' -import { PrimaryDatabase } from '../db' -import { BackgroundQueue } from '../background' -import { IndexingService } from '../services/indexing' -import { LabelService } from '../services/label' -import { NotificationServer } from '../notifications' 
-import { AutoModerator } from '../auto-moderator' - -export function createServices(resources: { - idResolver: IdResolver - autoMod: AutoModerator - backgroundQueue: BackgroundQueue - notifServer?: NotificationServer -}): Services { - const { idResolver, autoMod, backgroundQueue, notifServer } = resources - return { - indexing: IndexingService.creator( - idResolver, - autoMod, - backgroundQueue, - notifServer, - ), - label: LabelService.creator(null), - } -} - -export type Services = { - indexing: FromDbPrimary - label: FromDbPrimary -} - -type FromDbPrimary = (db: PrimaryDatabase) => T diff --git a/packages/bsky/src/indexer/subscription.ts b/packages/bsky/src/indexer/subscription.ts deleted file mode 100644 index abc672db3b0..00000000000 --- a/packages/bsky/src/indexer/subscription.ts +++ /dev/null @@ -1,345 +0,0 @@ -import assert from 'node:assert' -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import { cborDecode, wait, handleAllSettledErrors } from '@atproto/common' -import { DisconnectError } from '@atproto/xrpc-server' -import { - WriteOpAction, - readCarWithRoot, - cborToLexRecord, - def, - Commit, -} from '@atproto/repo' -import { ValidationError } from '@atproto/lexicon' -import * as message from '../lexicon/types/com/atproto/sync/subscribeRepos' -import { Leader } from '../db/leader' -import { IndexingService } from '../services/indexing' -import log from './logger' -import { - ConsecutiveItem, - ConsecutiveList, - LatestQueue, - PartitionedQueue, - PerfectMap, - ProcessableMessage, - jitter, - loggableMessage, - strToInt, -} from '../subscription/util' -import IndexerContext from './context' - -export const INDEXER_SUB_LOCK_ID = 1200 // need one per partition - -export class IndexerSubscription { - destroyed = false - leader = new Leader(this.opts.subLockId || INDEXER_SUB_LOCK_ID, this.ctx.db) - processedCount = 0 - repoQueue = new PartitionedQueue({ - concurrency: this.opts.concurrency ?? 
Infinity, - }) - partitions = new PerfectMap() - partitionIds = this.opts.partitionIds - indexingSvc: IndexingService - - constructor( - public ctx: IndexerContext, - public opts: { - partitionIds: number[] - subLockId?: number - concurrency?: number - partitionBatchSize?: number - }, - ) { - this.indexingSvc = ctx.services.indexing(ctx.db) - } - - async processEvents(opts: { signal: AbortSignal }) { - const done = () => this.destroyed || opts.signal.aborted - while (!done()) { - const results = await this.ctx.redis.readStreams( - this.partitionIds.map((id) => ({ - key: partitionKey(id), - cursor: this.partitions.get(id).cursor, - })), - { - blockMs: 1000, - count: this.opts.partitionBatchSize ?? 50, // events per stream - }, - ) - if (done()) break - for (const { key, messages } of results) { - const partition = this.partitions.get(partitionId(key)) - for (const msg of messages) { - const seq = strToInt(msg.cursor) - const envelope = getEnvelope(msg.contents) - partition.cursor = seq - const item = partition.consecutive.push(seq) - this.repoQueue.add(envelope.repo, async () => { - await this.handleMessage(partition, item, envelope) - }) - } - } - await this.repoQueue.main.onEmpty() // backpressure - } - } - - async run() { - while (!this.destroyed) { - try { - const { ran } = await this.leader.run(async ({ signal }) => { - // initialize cursors to 0 (read from beginning of stream) - for (const id of this.partitionIds) { - this.partitions.set(id, new Partition(id, 0)) - } - // process events - await this.processEvents({ signal }) - }) - if (ran && !this.destroyed) { - throw new Error('Indexer sub completed, but should be persistent') - } - } catch (err) { - log.error({ err }, 'indexer sub error') - } - if (!this.destroyed) { - await wait(5000 + jitter(1000)) // wait then try to become leader - } - } - } - - async requestReprocess(did: string) { - await this.repoQueue.add(did, async () => { - try { - await this.indexingSvc.indexRepo(did, undefined) - } catch (err) { 
- log.error({ did }, 'failed to reprocess repo') - } - }) - } - - async destroy() { - this.destroyed = true - await this.repoQueue.destroy() - await Promise.all( - [...this.partitions.values()].map((p) => p.cursorQueue.destroy()), - ) - this.leader.destroy(new DisconnectError()) - } - - async resume() { - this.destroyed = false - this.partitions = new Map() - this.repoQueue = new PartitionedQueue({ - concurrency: this.opts.concurrency ?? Infinity, - }) - await this.run() - } - - private async handleMessage( - partition: Partition, - item: ConsecutiveItem, - envelope: Envelope, - ) { - const msg = envelope.event - try { - if (message.isCommit(msg)) { - await this.handleCommit(msg) - } else if (message.isHandle(msg)) { - await this.handleUpdateHandle(msg) - } else if (message.isTombstone(msg)) { - await this.handleTombstone(msg) - } else if (message.isMigrate(msg)) { - // Ignore migrations - } else { - const exhaustiveCheck: never = msg - throw new Error(`Unhandled message type: ${exhaustiveCheck['$type']}`) - } - } catch (err) { - // We log messages we can't process and move on: - // otherwise the cursor would get stuck on a poison message. 
- log.error( - { err, message: loggableMessage(msg) }, - 'indexer message processing error', - ) - } finally { - this.processedCount++ - const latest = item.complete().at(-1) - if (latest !== undefined) { - partition.cursorQueue - .add(async () => { - await this.ctx.redis.trimStream(partition.key, latest + 1) - }) - .catch((err) => { - log.error({ err }, 'indexer cursor error') - }) - } - } - } - - private async handleCommit(msg: message.Commit) { - const indexRecords = async () => { - const { root, rootCid, ops } = await getOps(msg) - if (msg.tooBig) { - await this.indexingSvc.indexRepo(msg.repo, rootCid.toString()) - await this.indexingSvc.setCommitLastSeen(root, msg) - return - } - if (msg.rebase) { - const needsReindex = await this.indexingSvc.checkCommitNeedsIndexing( - root, - ) - if (needsReindex) { - await this.indexingSvc.indexRepo(msg.repo, rootCid.toString()) - } - await this.indexingSvc.setCommitLastSeen(root, msg) - return - } - for (const op of ops) { - if (op.action === WriteOpAction.Delete) { - await this.indexingSvc.deleteRecord(op.uri) - } else { - try { - await this.indexingSvc.indexRecord( - op.uri, - op.cid, - op.record, - op.action, // create or update - msg.time, - ) - } catch (err) { - if (err instanceof ValidationError) { - log.warn( - { - did: msg.repo, - commit: msg.commit.toString(), - uri: op.uri.toString(), - cid: op.cid.toString(), - }, - 'skipping indexing of invalid record', - ) - } else { - log.error( - { - err, - did: msg.repo, - commit: msg.commit.toString(), - uri: op.uri.toString(), - cid: op.cid.toString(), - }, - 'skipping indexing due to error processing record', - ) - } - } - } - } - await this.indexingSvc.setCommitLastSeen(root, msg) - } - const results = await Promise.allSettled([ - indexRecords(), - this.indexingSvc.indexHandle(msg.repo, msg.time), - ]) - handleAllSettledErrors(results) - } - - private async handleUpdateHandle(msg: message.Handle) { - await this.indexingSvc.indexHandle(msg.did, msg.time, true) - } - - 
private async handleTombstone(msg: message.Tombstone) { - await this.indexingSvc.tombstoneActor(msg.did) - } -} - -async function getOps( - msg: message.Commit, -): Promise<{ root: Commit; rootCid: CID; ops: PreparedWrite[] }> { - const car = await readCarWithRoot(msg.blocks as Uint8Array) - const rootBytes = car.blocks.get(car.root) - assert(rootBytes, 'Missing commit block in car slice') - - const root = def.commit.schema.parse(cborDecode(rootBytes)) - const ops: PreparedWrite[] = msg.ops.map((op) => { - const [collection, rkey] = op.path.split('/') - assert(collection && rkey) - if ( - op.action === WriteOpAction.Create || - op.action === WriteOpAction.Update - ) { - assert(op.cid) - const record = car.blocks.get(op.cid) - assert(record) - return { - action: - op.action === WriteOpAction.Create - ? WriteOpAction.Create - : WriteOpAction.Update, - cid: op.cid, - record: cborToLexRecord(record), - blobs: [], - uri: AtUri.make(msg.repo, collection, rkey), - } - } else if (op.action === WriteOpAction.Delete) { - return { - action: WriteOpAction.Delete, - uri: AtUri.make(msg.repo, collection, rkey), - } - } else { - throw new Error(`Unknown repo op action: ${op.action}`) - } - }) - - return { root, rootCid: car.root, ops } -} - -function getEnvelope(val: Record): Envelope { - assert(val.repo && val.event, 'malformed message contents') - return { - repo: val.repo.toString(), - event: cborDecode(val.event) as ProcessableMessage, - } -} - -type Envelope = { - repo: string - event: ProcessableMessage -} - -class Partition { - consecutive = new ConsecutiveList() - cursorQueue = new LatestQueue() - constructor(public id: number, public cursor: number) {} - get key() { - return partitionKey(this.id) - } -} - -function partitionId(key: string) { - assert(key.startsWith('repo:')) - return strToInt(key.replace('repo:', '')) -} - -function partitionKey(p: number) { - return `repo:${p}` -} - -type PreparedCreate = { - action: WriteOpAction.Create - uri: AtUri - cid: CID - 
record: Record - blobs: CID[] // differs from similar type in pds -} - -type PreparedUpdate = { - action: WriteOpAction.Update - uri: AtUri - cid: CID - record: Record - blobs: CID[] // differs from similar type in pds -} - -type PreparedDelete = { - action: WriteOpAction.Delete - uri: AtUri -} - -type PreparedWrite = PreparedCreate | PreparedUpdate | PreparedDelete diff --git a/packages/bsky/src/ingester/config.ts b/packages/bsky/src/ingester/config.ts deleted file mode 100644 index 969aeeff7aa..00000000000 --- a/packages/bsky/src/ingester/config.ts +++ /dev/null @@ -1,141 +0,0 @@ -import assert from 'assert' - -export interface IngesterConfigValues { - version: string - dbPostgresUrl: string - dbPostgresSchema?: string - redisHost?: string // either set redis host, or both sentinel name and hosts - redisSentinelName?: string - redisSentinelHosts?: string[] - redisPassword?: string - repoProvider: string - ingesterPartitionCount: number - ingesterNamespace?: string - ingesterSubLockId?: number - ingesterMaxItems?: number - ingesterCheckItemsEveryN?: number - ingesterInitialCursor?: number -} - -export class IngesterConfig { - constructor(private cfg: IngesterConfigValues) {} - - static readEnv(overrides?: Partial) { - const version = process.env.BSKY_VERSION || '0.0.0' - const dbPostgresUrl = - overrides?.dbPostgresUrl || process.env.DB_PRIMARY_POSTGRES_URL - const dbPostgresSchema = - overrides?.dbPostgresSchema || process.env.DB_POSTGRES_SCHEMA - const redisHost = - overrides?.redisHost || process.env.REDIS_HOST || undefined - const redisSentinelName = - overrides?.redisSentinelName || - process.env.REDIS_SENTINEL_NAME || - undefined - const redisSentinelHosts = - overrides?.redisSentinelHosts || - (process.env.REDIS_SENTINEL_HOSTS - ? process.env.REDIS_SENTINEL_HOSTS.split(',') - : []) - const redisPassword = - overrides?.redisPassword || process.env.REDIS_PASSWORD || undefined - const repoProvider = overrides?.repoProvider || process.env.REPO_PROVIDER // E.g. 
ws://abc.com:4000 - const ingesterPartitionCount = - overrides?.ingesterPartitionCount || - maybeParseInt(process.env.INGESTER_PARTITION_COUNT) - const ingesterSubLockId = - overrides?.ingesterSubLockId || - maybeParseInt(process.env.INGESTER_SUB_LOCK_ID) - const ingesterMaxItems = - overrides?.ingesterMaxItems || - maybeParseInt(process.env.INGESTER_MAX_ITEMS) - const ingesterCheckItemsEveryN = - overrides?.ingesterCheckItemsEveryN || - maybeParseInt(process.env.INGESTER_CHECK_ITEMS_EVERY_N) - const ingesterInitialCursor = - overrides?.ingesterInitialCursor || - maybeParseInt(process.env.INGESTER_INITIAL_CURSOR) - const ingesterNamespace = overrides?.ingesterNamespace - assert(dbPostgresUrl) - assert(redisHost || (redisSentinelName && redisSentinelHosts?.length)) - assert(repoProvider) - assert(ingesterPartitionCount) - return new IngesterConfig({ - version, - dbPostgresUrl, - dbPostgresSchema, - redisHost, - redisSentinelName, - redisSentinelHosts, - redisPassword, - repoProvider, - ingesterPartitionCount, - ingesterSubLockId, - ingesterNamespace, - ingesterMaxItems, - ingesterCheckItemsEveryN, - ingesterInitialCursor, - }) - } - - get version() { - return this.cfg.version - } - - get dbPostgresUrl() { - return this.cfg.dbPostgresUrl - } - - get dbPostgresSchema() { - return this.cfg.dbPostgresSchema - } - - get redisHost() { - return this.cfg.redisHost - } - - get redisSentinelName() { - return this.cfg.redisSentinelName - } - - get redisSentinelHosts() { - return this.cfg.redisSentinelHosts - } - - get redisPassword() { - return this.cfg.redisPassword - } - - get repoProvider() { - return this.cfg.repoProvider - } - - get ingesterPartitionCount() { - return this.cfg.ingesterPartitionCount - } - - get ingesterMaxItems() { - return this.cfg.ingesterMaxItems - } - - get ingesterCheckItemsEveryN() { - return this.cfg.ingesterCheckItemsEveryN - } - - get ingesterInitialCursor() { - return this.cfg.ingesterInitialCursor - } - - get ingesterNamespace() { - return 
this.cfg.ingesterNamespace - } - - get ingesterSubLockId() { - return this.cfg.ingesterSubLockId - } -} - -function maybeParseInt(str) { - const parsed = parseInt(str) - return isNaN(parsed) ? undefined : parsed -} diff --git a/packages/bsky/src/ingester/context.ts b/packages/bsky/src/ingester/context.ts deleted file mode 100644 index 792d3c2015a..00000000000 --- a/packages/bsky/src/ingester/context.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { PrimaryDatabase } from '../db' -import { Redis } from '../redis' -import { IngesterConfig } from './config' - -export class IngesterContext { - constructor( - private opts: { - db: PrimaryDatabase - redis: Redis - cfg: IngesterConfig - }, - ) {} - - get db(): PrimaryDatabase { - return this.opts.db - } - - get redis(): Redis { - return this.opts.redis - } - - get cfg(): IngesterConfig { - return this.opts.cfg - } -} - -export default IngesterContext diff --git a/packages/bsky/src/ingester/index.ts b/packages/bsky/src/ingester/index.ts deleted file mode 100644 index 376da2887da..00000000000 --- a/packages/bsky/src/ingester/index.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { PrimaryDatabase } from '../db' -import log from './logger' -import { dbLogger } from '../logger' -import { Redis } from '../redis' -import { IngesterConfig } from './config' -import { IngesterContext } from './context' -import { IngesterSubscription } from './subscription' - -export { IngesterConfig } from './config' -export type { IngesterConfigValues } from './config' - -export class BskyIngester { - public ctx: IngesterContext - public sub: IngesterSubscription - private dbStatsInterval: NodeJS.Timer - private subStatsInterval: NodeJS.Timer - - constructor(opts: { ctx: IngesterContext; sub: IngesterSubscription }) { - this.ctx = opts.ctx - this.sub = opts.sub - } - - static create(opts: { - db: PrimaryDatabase - redis: Redis - cfg: IngesterConfig - }): BskyIngester { - const { db, redis, cfg } = opts - const ctx = new IngesterContext({ db, redis, cfg }) - 
const sub = new IngesterSubscription(ctx, { - service: cfg.repoProvider, - subLockId: cfg.ingesterSubLockId, - partitionCount: cfg.ingesterPartitionCount, - maxItems: cfg.ingesterMaxItems, - checkItemsEveryN: cfg.ingesterCheckItemsEveryN, - initialCursor: cfg.ingesterInitialCursor, - }) - return new BskyIngester({ ctx, sub }) - } - - async start() { - const { db } = this.ctx - const pool = db.pool - this.dbStatsInterval = setInterval(() => { - dbLogger.info( - { - idleCount: pool.idleCount, - totalCount: pool.totalCount, - waitingCount: pool.waitingCount, - }, - 'db pool stats', - ) - }, 10000) - this.subStatsInterval = setInterval(() => { - log.info( - { - seq: this.sub.lastSeq, - streamsLength: - this.sub.backpressure.lastTotal !== null - ? this.sub.backpressure.lastTotal - : undefined, - }, - 'ingester stats', - ) - }, 500) - this.sub.run() - return this - } - - async destroy(opts?: { skipDb: boolean }): Promise { - await this.sub.destroy() - clearInterval(this.subStatsInterval) - await this.ctx.redis.destroy() - if (!opts?.skipDb) await this.ctx.db.close() - clearInterval(this.dbStatsInterval) - } -} - -export default BskyIngester diff --git a/packages/bsky/src/ingester/logger.ts b/packages/bsky/src/ingester/logger.ts deleted file mode 100644 index 49855166481..00000000000 --- a/packages/bsky/src/ingester/logger.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { subsystemLogger } from '@atproto/common' - -const logger: ReturnType = - subsystemLogger('bsky:ingester') - -export default logger diff --git a/packages/bsky/src/ingester/subscription.ts b/packages/bsky/src/ingester/subscription.ts deleted file mode 100644 index 14f301e07f9..00000000000 --- a/packages/bsky/src/ingester/subscription.ts +++ /dev/null @@ -1,288 +0,0 @@ -import { - Deferrable, - cborEncode, - createDeferrable, - ui8ToBuffer, - wait, -} from '@atproto/common' -import { randomIntFromSeed } from '@atproto/crypto' -import { DisconnectError, Subscription } from '@atproto/xrpc-server' -import { 
OutputSchema as Message } from '../lexicon/types/com/atproto/sync/subscribeRepos' -import * as message from '../lexicon/types/com/atproto/sync/subscribeRepos' -import { ids, lexicons } from '../lexicon/lexicons' -import { Leader } from '../db/leader' -import log from './logger' -import { - LatestQueue, - ProcessableMessage, - loggableMessage, - jitter, - strToInt, -} from '../subscription/util' -import { IngesterContext } from './context' - -const METHOD = ids.ComAtprotoSyncSubscribeRepos -const CURSOR_KEY = 'ingester:cursor' -export const INGESTER_SUB_LOCK_ID = 1000 - -export class IngesterSubscription { - cursorQueue = new LatestQueue() - destroyed = false - lastSeq: number | undefined - backpressure = new Backpressure(this) - leader = new Leader(this.opts.subLockId || INGESTER_SUB_LOCK_ID, this.ctx.db) - processor = new Processor(this) - - constructor( - public ctx: IngesterContext, - public opts: { - service: string - partitionCount: number - maxItems?: number - checkItemsEveryN?: number - subLockId?: number - initialCursor?: number - }, - ) {} - - async run() { - while (!this.destroyed) { - try { - const { ran } = await this.leader.run(async ({ signal }) => { - const sub = this.getSubscription({ signal }) - for await (const msg of sub) { - const details = getMessageDetails(msg) - if ('info' in details) { - // These messages are not sequenced, we just log them and carry on - log.warn( - { provider: this.opts.service, message: loggableMessage(msg) }, - `ingester sub ${details.info ? 
'info' : 'unknown'} message`, - ) - continue - } - this.processor.send(details) - await this.backpressure.ready() - } - }) - if (ran && !this.destroyed) { - throw new Error('Ingester sub completed, but should be persistent') - } - } catch (err) { - log.error({ err, provider: this.opts.service }, 'ingester sub error') - } - if (!this.destroyed) { - await wait(1000 + jitter(500)) // wait then try to become leader - } - } - } - - async destroy() { - this.destroyed = true - await this.processor.destroy() - await this.cursorQueue.destroy() - this.leader.destroy(new DisconnectError()) - } - - async resume() { - this.destroyed = false - this.processor = new Processor(this) - this.cursorQueue = new LatestQueue() - await this.run() - } - - async getCursor(): Promise { - const val = await this.ctx.redis.get(CURSOR_KEY) - const initialCursor = this.opts.initialCursor ?? 0 - return val !== null ? strToInt(val) : initialCursor - } - - async resetCursor(): Promise { - await this.ctx.redis.del(CURSOR_KEY) - } - - async setCursor(seq: number): Promise { - await this.ctx.redis.set(CURSOR_KEY, seq) - } - - private getSubscription(opts: { signal: AbortSignal }) { - return new Subscription({ - service: this.opts.service, - method: METHOD, - signal: opts.signal, - getParams: async () => { - const cursor = await this.getCursor() - return { cursor } - }, - onReconnectError: (err, reconnects, initial) => { - log.warn({ err, reconnects, initial }, 'ingester sub reconnect') - }, - validate: (value) => { - try { - return lexicons.assertValidXrpcMessage(METHOD, value) - } catch (err) { - log.warn( - { - err, - seq: ifNumber(value?.['seq']), - repo: ifString(value?.['repo']), - commit: ifString(value?.['commit']?.toString()), - time: ifString(value?.['time']), - provider: this.opts.service, - }, - 'ingester sub skipped invalid message', - ) - } - }, - }) - } -} - -function ifString(val: unknown): string | undefined { - return typeof val === 'string' ? 
val : undefined -} - -function ifNumber(val: unknown): number | undefined { - return typeof val === 'number' ? val : undefined -} - -function getMessageDetails(msg: Message): - | { info: message.Info | null } - | { - seq: number - repo: string - message: ProcessableMessage - } { - if (message.isCommit(msg)) { - return { seq: msg.seq, repo: msg.repo, message: msg } - } else if (message.isHandle(msg)) { - return { seq: msg.seq, repo: msg.did, message: msg } - } else if (message.isMigrate(msg)) { - return { seq: msg.seq, repo: msg.did, message: msg } - } else if (message.isTombstone(msg)) { - return { seq: msg.seq, repo: msg.did, message: msg } - } else if (message.isInfo(msg)) { - return { info: msg } - } - return { info: null } -} - -async function getPartition(did: string, n: number) { - const partition = await randomIntFromSeed(did, n) - return `repo:${partition}` -} - -class Processor { - running: Deferrable | null = null - destroyed = false - unprocessed: MessageEnvelope[] = [] - - constructor(public sub: IngesterSubscription) {} - - async handleBatch(batch: MessageEnvelope[]) { - if (!batch.length) return - const items = await Promise.all( - batch.map(async ({ seq, repo, message }) => { - const key = await getPartition(repo, this.sub.opts.partitionCount) - const fields: [string, string | Buffer][] = [ - ['repo', repo], - ['event', ui8ToBuffer(cborEncode(message))], - ] - return { key, id: seq, fields } - }), - ) - const results = await this.sub.ctx.redis.addMultiToStream(items) - results.forEach(([err], i) => { - if (err) { - // skipping over messages that have already been added or fully processed - const item = batch.at(i) - log.warn( - { seq: item?.seq, repo: item?.repo }, - 'ingester skipping message', - ) - } - }) - const lastSeq = batch[batch.length - 1].seq - this.sub.lastSeq = lastSeq - this.sub.cursorQueue.add(() => this.sub.setCursor(lastSeq)) - } - - async process() { - if (this.running || this.destroyed || !this.unprocessed.length) return - const 
next = this.unprocessed.splice(100) // pipeline no more than 100 - const processing = this.unprocessed - this.unprocessed = next - this.running = createDeferrable() - try { - await this.handleBatch(processing) - } catch (err) { - log.error( - { err, size: processing.length }, - 'ingester processing failed, rolling over to next batch', - ) - this.unprocessed.unshift(...processing) - } finally { - this.running.resolve() - this.running = null - this.process() - } - } - - send(envelope: MessageEnvelope) { - this.unprocessed.push(envelope) - this.process() - } - - async destroy() { - this.destroyed = true - this.unprocessed = [] - await this.running?.complete - } -} - -type MessageEnvelope = { - seq: number - repo: string - message: ProcessableMessage -} - -class Backpressure { - count = 0 - lastTotal: number | null = null - partitionCount = this.sub.opts.partitionCount - limit = this.sub.opts.maxItems ?? Infinity - checkEvery = this.sub.opts.checkItemsEveryN ?? 500 - - constructor(public sub: IngesterSubscription) {} - - async ready() { - this.count++ - const shouldCheck = - this.limit !== Infinity && - (this.count === 1 || this.count % this.checkEvery === 0) - if (!shouldCheck) return - let ready = false - const start = Date.now() - while (!ready) { - ready = await this.check() - if (!ready) { - log.warn( - { - limit: this.limit, - total: this.lastTotal, - duration: Date.now() - start, - }, - 'ingester backpressure', - ) - await wait(250) - } - } - } - - async check() { - const lens = await this.sub.ctx.redis.streamLengths( - [...Array(this.partitionCount)].map((_, i) => `repo:${i}`), - ) - this.lastTotal = lens.reduce((sum, len) => sum + len, 0) - return this.lastTotal < this.limit - } -} diff --git a/packages/bsky/src/migrate-moderation-data.ts b/packages/bsky/src/migrate-moderation-data.ts deleted file mode 100644 index 6919358170a..00000000000 --- a/packages/bsky/src/migrate-moderation-data.ts +++ /dev/null @@ -1,414 +0,0 @@ -import { sql } from 'kysely' -import { 
DatabaseCoordinator, PrimaryDatabase } from './index' -import { adjustModerationSubjectStatus } from './services/moderation/status' -import { ModerationEventRow } from './services/moderation/types' - -type ModerationActionRow = Omit & { - reason: string | null -} - -const getEnv = () => ({ - DB_URL: - process.env.MODERATION_MIGRATION_DB_URL || - 'postgresql://pg:password@127.0.0.1:5433/postgres', - DB_POOL_SIZE: Number(process.env.MODERATION_MIGRATION_DB_POOL_SIZE) || 10, - DB_SCHEMA: process.env.MODERATION_MIGRATION_DB_SCHEMA || 'bsky', -}) - -const countEntries = async (db: PrimaryDatabase) => { - const [allActions, allReports] = await Promise.all([ - db.db - // @ts-ignore - .selectFrom('moderation_action') - // @ts-ignore - .select((eb) => eb.fn.count('id').as('count')) - .executeTakeFirstOrThrow(), - db.db - // @ts-ignore - .selectFrom('moderation_report') - // @ts-ignore - .select((eb) => eb.fn.count('id').as('count')) - .executeTakeFirstOrThrow(), - ]) - - return { reportsCount: allReports.count, actionsCount: allActions.count } -} - -const countEvents = async (db: PrimaryDatabase) => { - const events = await db.db - .selectFrom('moderation_event') - .select((eb) => eb.fn.count('id').as('count')) - .executeTakeFirstOrThrow() - - return events.count -} - -const getLatestReportLegacyRefId = async (db: PrimaryDatabase) => { - const events = await db.db - .selectFrom('moderation_event') - .select((eb) => eb.fn.max('legacyRefId').as('latestLegacyRefId')) - .where('action', '=', 'com.atproto.admin.defs#modEventReport') - .executeTakeFirstOrThrow() - - return events.latestLegacyRefId -} - -const countStatuses = async (db: PrimaryDatabase) => { - const events = await db.db - .selectFrom('moderation_subject_status') - .select((eb) => eb.fn.count('id').as('count')) - .executeTakeFirstOrThrow() - - return events.count -} - -const processLegacyReports = async ( - db: PrimaryDatabase, - legacyIds: number[], -) => { - if (!legacyIds.length) { - console.log('No legacy 
reports to process') - return - } - const reports = await db.db - .selectFrom('moderation_event') - .where('action', '=', 'com.atproto.admin.defs#modEventReport') - .where('legacyRefId', 'in', legacyIds) - .orderBy('legacyRefId', 'asc') - .selectAll() - .execute() - - console.log(`Processing ${reports.length} reports from ${legacyIds.length}`) - await db.transaction(async (tx) => { - // This will be slow but we need to run this in sequence - for (const report of reports) { - await adjustModerationSubjectStatus(tx, report) - } - }) - console.log(`Completed processing ${reports.length} reports`) -} - -const getReportEventsAboveLegacyId = async ( - db: PrimaryDatabase, - aboveLegacyId: number, -) => { - return await db.db - .selectFrom('moderation_event') - .where('action', '=', 'com.atproto.admin.defs#modEventReport') - .where('legacyRefId', '>', aboveLegacyId) - .select(sql`"legacyRefId"`.as('legacyRefId')) - .execute() -} - -const createEvents = async ( - db: PrimaryDatabase, - opts?: { onlyReportsAboveId: number }, -) => { - const commonColumnsToSelect = [ - 'subjectDid', - 'subjectUri', - 'subjectType', - 'subjectCid', - sql`reason`.as('comment'), - 'createdAt', - ] - const commonColumnsToInsert = [ - 'subjectDid', - 'subjectUri', - 'subjectType', - 'subjectCid', - 'comment', - 'createdAt', - 'action', - 'createdBy', - ] as const - - let totalActions: number - if (!opts?.onlyReportsAboveId) { - await db.db - .insertInto('moderation_event') - .columns([ - 'id', - ...commonColumnsToInsert, - 'createLabelVals', - 'negateLabelVals', - 'durationInHours', - 'expiresAt', - ]) - .expression((eb) => - eb - // @ts-ignore - .selectFrom('moderation_action') - // @ts-ignore - .select([ - 'id', - ...commonColumnsToSelect, - sql`CONCAT('com.atproto.admin.defs#modEvent', UPPER(SUBSTRING(SPLIT_PART(action, '#', 2) FROM 1 FOR 1)), SUBSTRING(SPLIT_PART(action, '#', 2) FROM 2))`.as( - 'action', - ), - 'createdBy', - 'createLabelVals', - 'negateLabelVals', - 'durationInHours', - 
'expiresAt', - ]) - .orderBy('id', 'asc'), - ) - .execute() - - totalActions = await countEvents(db) - console.log(`Created ${totalActions} events from actions`) - - await sql`SELECT setval(pg_get_serial_sequence('moderation_event', 'id'), (select max(id) from moderation_event))`.execute( - db.db, - ) - console.log('Reset the id sequence for moderation_event') - } else { - totalActions = await countEvents(db) - } - - await db.db - .insertInto('moderation_event') - .columns([...commonColumnsToInsert, 'meta', 'legacyRefId']) - .expression((eb) => { - const builder = eb - // @ts-ignore - .selectFrom('moderation_report') - // @ts-ignore - .select([ - ...commonColumnsToSelect, - sql`'com.atproto.admin.defs#modEventReport'`.as('action'), - sql`"reportedByDid"`.as('createdBy'), - sql`json_build_object('reportType', "reasonType")`.as('meta'), - sql`id`.as('legacyRefId'), - ]) - - if (opts?.onlyReportsAboveId) { - // @ts-ignore - return builder.where('id', '>', opts.onlyReportsAboveId) - } - - return builder - }) - .execute() - - const totalEvents = await countEvents(db) - console.log(`Created ${totalEvents - totalActions} events from reports`) - - return -} - -const setReportedAtTimestamp = async (db: PrimaryDatabase) => { - console.log('Initiating lastReportedAt timestamp sync') - const didUpdate = await sql` - UPDATE moderation_subject_status - SET "lastReportedAt" = reports."createdAt" - FROM ( - select "subjectDid", "subjectUri", MAX("createdAt") as "createdAt" - from moderation_report - where "subjectUri" is null - group by "subjectDid", "subjectUri" - ) as reports - WHERE reports."subjectDid" = moderation_subject_status."did" - AND "recordPath" = '' - AND ("lastReportedAt" is null OR "lastReportedAt" < reports."createdAt") - `.execute(db.db) - - console.log( - `Updated lastReportedAt for ${didUpdate.numUpdatedOrDeletedRows} did subject`, - ) - - const contentUpdate = await sql` - UPDATE moderation_subject_status - SET "lastReportedAt" = reports."createdAt" - FROM ( - 
select "subjectDid", "subjectUri", MAX("createdAt") as "createdAt" - from moderation_report - where "subjectUri" is not null - group by "subjectDid", "subjectUri" - ) as reports - WHERE reports."subjectDid" = moderation_subject_status."did" - AND "recordPath" is not null - AND POSITION(moderation_subject_status."recordPath" IN reports."subjectUri") > 0 - AND ("lastReportedAt" is null OR "lastReportedAt" < reports."createdAt") - `.execute(db.db) - - console.log( - `Updated lastReportedAt for ${contentUpdate.numUpdatedOrDeletedRows} subject with uri`, - ) -} - -const createStatusFromActions = async (db: PrimaryDatabase) => { - const allEvents = await db.db - // @ts-ignore - .selectFrom('moderation_action') - // @ts-ignore - .where('reversedAt', 'is', null) - // @ts-ignore - .select((eb) => eb.fn.count('id').as('count')) - .executeTakeFirstOrThrow() - - const chunkSize = 2500 - const totalChunks = Math.ceil(allEvents.count / chunkSize) - - console.log(`Processing ${allEvents.count} actions in ${totalChunks} chunks`) - - await db.transaction(async (tx) => { - // This is not used for pagination but only for logging purposes - let currentChunk = 1 - let lastProcessedId: undefined | number = 0 - do { - const eventsQuery = tx.db - // @ts-ignore - .selectFrom('moderation_action') - // @ts-ignore - .where('reversedAt', 'is', null) - // @ts-ignore - .where('id', '>', lastProcessedId) - .limit(chunkSize) - .selectAll() - const events = (await eventsQuery.execute()) as ModerationActionRow[] - - for (const event of events) { - // Remap action to event data type - const actionParts = event.action.split('#') - await adjustModerationSubjectStatus(tx, { - ...event, - action: `com.atproto.admin.defs#modEvent${actionParts[1] - .charAt(0) - .toUpperCase()}${actionParts[1].slice( - 1, - )}` as ModerationEventRow['action'], - comment: event.reason, - meta: null, - }) - } - - console.log(`Processed events chunk ${currentChunk} of ${totalChunks}`) - lastProcessedId = events.at(-1)?.id - 
currentChunk++ - } while (lastProcessedId !== undefined) - }) - - console.log(`Events migration complete!`) - - const totalStatuses = await countStatuses(db) - console.log(`Created ${totalStatuses} statuses`) -} - -const remapFlagToAcknlowedge = async (db: PrimaryDatabase) => { - console.log('Initiating flag to ack remap') - const results = await sql` - UPDATE moderation_event - SET "action" = 'com.atproto.admin.defs#modEventAcknowledge' - WHERE action = 'com.atproto.admin.defs#modEventFlag' - `.execute(db.db) - console.log(`Remapped ${results.numUpdatedOrDeletedRows} flag actions to ack`) -} - -const syncBlobCids = async (db: PrimaryDatabase) => { - console.log('Initiating blob cid sync') - const results = await sql` - UPDATE moderation_subject_status - SET "blobCids" = blob_action."cids" - FROM ( - SELECT moderation_action."subjectUri", moderation_action."subjectDid", jsonb_agg(moderation_action_subject_blob."cid") as cids - FROM moderation_action_subject_blob - JOIN moderation_action - ON moderation_action.id = moderation_action_subject_blob."actionId" - WHERE moderation_action."reversedAt" is NULL - GROUP by moderation_action."subjectUri", moderation_action."subjectDid" - ) as blob_action - WHERE did = "subjectDid" AND position("recordPath" IN "subjectUri") > 0 - `.execute(db.db) - console.log(`Updated blob cids on ${results.numUpdatedOrDeletedRows} rows`) -} - -async function updateStatusFromUnresolvedReports(db: PrimaryDatabase) { - const { ref } = db.db.dynamic - const reports = await db.db - // @ts-ignore - .selectFrom('moderation_report') - .whereNotExists((qb) => - qb - .selectFrom('moderation_report_resolution') - .selectAll() - // @ts-ignore - .whereRef('reportId', '=', ref('moderation_report.id')), - ) - .select(sql`moderation_report.id`.as('legacyId')) - .execute() - - console.log('Updating statuses based on unresolved reports') - await processLegacyReports( - db, - reports.map((report) => report.legacyId), - ) - console.log('Completed updating 
statuses based on unresolved reports') -} - -export async function MigrateModerationData() { - const env = getEnv() - const db = new DatabaseCoordinator({ - schema: env.DB_SCHEMA, - primary: { - url: env.DB_URL, - poolSize: env.DB_POOL_SIZE, - }, - replicas: [], - }) - - const primaryDb = db.getPrimary() - - const [counts, existingEventsCount] = await Promise.all([ - countEntries(primaryDb), - countEvents(primaryDb), - ]) - - // If there are existing events in the moderation_event table, we assume that the migration has already been run - // so we just bring over any new reports since last run - if (existingEventsCount) { - console.log( - `Found ${existingEventsCount} existing events. Migrating ${counts.reportsCount} reports only, ignoring actions`, - ) - const reportMigrationStartedAt = Date.now() - const latestReportLegacyRefId = await getLatestReportLegacyRefId(primaryDb) - - if (latestReportLegacyRefId) { - await createEvents(primaryDb, { - onlyReportsAboveId: latestReportLegacyRefId, - }) - const newReportEvents = await getReportEventsAboveLegacyId( - primaryDb, - latestReportLegacyRefId, - ) - await processLegacyReports( - primaryDb, - newReportEvents.map((evt) => evt.legacyRefId), - ) - await setReportedAtTimestamp(primaryDb) - } else { - console.log('No reports have been migrated into events yet, bailing.') - } - - console.log( - `Time spent: ${(Date.now() - reportMigrationStartedAt) / 1000} seconds`, - ) - console.log('Migration complete!') - return - } - - const totalEntries = counts.actionsCount + counts.reportsCount - console.log(`Migrating ${totalEntries} rows of actions and reports`) - const startedAt = Date.now() - await createEvents(primaryDb) - // Important to run this before creation statuses from actions to ensure that we are not attempting to map flag actions - await remapFlagToAcknlowedge(primaryDb) - await createStatusFromActions(primaryDb) - await updateStatusFromUnresolvedReports(primaryDb) - await setReportedAtTimestamp(primaryDb) - await 
syncBlobCids(primaryDb) - - console.log(`Time spent: ${(Date.now() - startedAt) / 1000 / 60} minutes`) - console.log('Migration complete!') -} diff --git a/packages/bsky/src/services/moderation/index.ts b/packages/bsky/src/services/moderation/index.ts deleted file mode 100644 index 3ba845333d5..00000000000 --- a/packages/bsky/src/services/moderation/index.ts +++ /dev/null @@ -1,656 +0,0 @@ -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { PrimaryDatabase } from '../../db' -import { ModerationViews } from './views' -import { ImageUriBuilder } from '../../image/uri' -import { Main as StrongRef } from '../../lexicon/types/com/atproto/repo/strongRef' -import { ImageInvalidator } from '../../image/invalidator' -import { - isModEventComment, - isModEventLabel, - isModEventMute, - isModEventReport, - isModEventTakedown, - isModEventEmail, - RepoRef, - RepoBlobRef, -} from '../../lexicon/types/com/atproto/admin/defs' -import { addHoursToDate } from '../../util/date' -import { - adjustModerationSubjectStatus, - getStatusIdentifierFromSubject, -} from './status' -import { - ModEventType, - ModerationEventRow, - ModerationEventRowWithHandle, - ModerationSubjectStatusRow, - ReversibleModerationEvent, - SubjectInfo, -} from './types' -import { ModerationEvent } from '../../db/tables/moderation' -import { paginate } from '../../db/pagination' -import { StatusKeyset, TimeIdKeyset } from './pagination' - -export class ModerationService { - constructor( - public db: PrimaryDatabase, - public imgUriBuilder: ImageUriBuilder, - public imgInvalidator: ImageInvalidator, - ) {} - - static creator( - imgUriBuilder: ImageUriBuilder, - imgInvalidator: ImageInvalidator, - ) { - return (db: PrimaryDatabase) => - new ModerationService(db, imgUriBuilder, imgInvalidator) - } - - views = new ModerationViews(this.db) - - async getEvent(id: number): Promise { - return await this.db.db - 
.selectFrom('moderation_event') - .selectAll() - .where('id', '=', id) - .executeTakeFirst() - } - - async getEventOrThrow(id: number): Promise { - const event = await this.getEvent(id) - if (!event) throw new InvalidRequestError('Moderation event not found') - return event - } - - async getEvents(opts: { - subject?: string - createdBy?: string - limit: number - cursor?: string - includeAllUserRecords: boolean - types: ModerationEvent['action'][] - sortDirection?: 'asc' | 'desc' - }): Promise<{ cursor?: string; events: ModerationEventRowWithHandle[] }> { - const { - subject, - createdBy, - limit, - cursor, - includeAllUserRecords, - sortDirection = 'desc', - types, - } = opts - let builder = this.db.db - .selectFrom('moderation_event') - .leftJoin( - 'actor as creatorActor', - 'creatorActor.did', - 'moderation_event.createdBy', - ) - .leftJoin( - 'actor as subjectActor', - 'subjectActor.did', - 'moderation_event.subjectDid', - ) - if (subject) { - builder = builder.where((qb) => { - if (includeAllUserRecords) { - // If subject is an at-uri, we need to extract the DID from the at-uri - // otherwise, subject is probably a DID already - if (subject.startsWith('at://')) { - const uri = new AtUri(subject) - return qb.where('subjectDid', '=', uri.hostname) - } - return qb.where('subjectDid', '=', subject) - } - return qb - .where((subQb) => - subQb - .where('subjectDid', '=', subject) - .where('subjectUri', 'is', null), - ) - .orWhere('subjectUri', '=', subject) - }) - } - if (types.length) { - builder = builder.where((qb) => { - if (types.length === 1) { - return qb.where('action', '=', types[0]) - } - - return qb.where('action', 'in', types) - }) - } - if (createdBy) { - builder = builder.where('createdBy', '=', createdBy) - } - - const { ref } = this.db.db.dynamic - const keyset = new TimeIdKeyset( - ref(`moderation_event.createdAt`), - ref('moderation_event.id'), - ) - const paginatedBuilder = paginate(builder, { - limit, - cursor, - keyset, - direction: 
sortDirection, - tryIndex: true, - }) - - const result = await paginatedBuilder - .selectAll(['moderation_event']) - .select([ - 'subjectActor.handle as subjectHandle', - 'creatorActor.handle as creatorHandle', - ]) - .execute() - - return { cursor: keyset.packFromResult(result), events: result } - } - - async getReport(id: number): Promise { - return await this.db.db - .selectFrom('moderation_event') - .where('action', '=', 'com.atproto.admin.defs#modEventReport') - .selectAll() - .where('id', '=', id) - .executeTakeFirst() - } - - async getCurrentStatus( - subject: { did: string } | { uri: AtUri } | { cids: CID[] }, - ) { - let builder = this.db.db.selectFrom('moderation_subject_status').selectAll() - if ('did' in subject) { - builder = builder.where('did', '=', subject.did) - } else if ('uri' in subject) { - builder = builder.where('recordPath', '=', subject.uri.toString()) - } - // TODO: Handle the cid status - return await builder.execute() - } - - buildSubjectInfo( - subject: { did: string } | { uri: AtUri; cid: CID }, - subjectBlobCids?: CID[], - ): SubjectInfo { - if ('did' in subject) { - if (subjectBlobCids?.length) { - throw new InvalidRequestError('Blobs do not apply to repo subjects') - } - // Allowing dids that may not exist: may have been deleted but needs to remain actionable. - return { - subjectType: 'com.atproto.admin.defs#repoRef', - subjectDid: subject.did, - subjectUri: null, - subjectCid: null, - } - } - - // Allowing records/blobs that may not exist: may have been deleted but needs to remain actionable. 
- return { - subjectType: 'com.atproto.repo.strongRef', - subjectDid: subject.uri.host, - subjectUri: subject.uri.toString(), - subjectCid: subject.cid.toString(), - } - } - - async logEvent(info: { - event: ModEventType - subject: { did: string } | { uri: AtUri; cid: CID } - subjectBlobCids?: CID[] - createdBy: string - createdAt?: Date - }): Promise { - this.db.assertTransaction() - const { - event, - createdBy, - subject, - subjectBlobCids, - createdAt = new Date(), - } = info - - // Resolve subject info - const subjectInfo = this.buildSubjectInfo(subject, subjectBlobCids) - - const createLabelVals = - isModEventLabel(event) && event.createLabelVals.length > 0 - ? event.createLabelVals.join(' ') - : undefined - const negateLabelVals = - isModEventLabel(event) && event.negateLabelVals.length > 0 - ? event.negateLabelVals.join(' ') - : undefined - - const meta: Record = {} - - if (isModEventReport(event)) { - meta.reportType = event.reportType - } - - if (isModEventComment(event) && event.sticky) { - meta.sticky = event.sticky - } - - if (isModEventEmail(event)) { - meta.subjectLine = event.subjectLine - } - - const modEvent = await this.db.db - .insertInto('moderation_event') - .values({ - comment: event.comment ? `${event.comment}` : null, - action: event.$type as ModerationEvent['action'], - createdAt: createdAt.toISOString(), - createdBy, - createLabelVals, - negateLabelVals, - durationInHours: event.durationInHours - ? Number(event.durationInHours) - : null, - meta, - expiresAt: - (isModEventTakedown(event) || isModEventMute(event)) && - event.durationInHours - ? 
addHoursToDate(event.durationInHours, createdAt).toISOString() - : undefined, - ...subjectInfo, - }) - .returningAll() - .executeTakeFirstOrThrow() - - await adjustModerationSubjectStatus(this.db, modEvent, subjectBlobCids) - - return modEvent - } - - async getLastReversibleEventForSubject({ - did, - muteUntil, - recordPath, - suspendUntil, - }: ModerationSubjectStatusRow) { - const isSuspended = suspendUntil && new Date(suspendUntil) < new Date() - const isMuted = muteUntil && new Date(muteUntil) < new Date() - - // If the subject is neither suspended nor muted don't bother finding the last reversible event - // Ideally, this should never happen because the caller of this method should only call this - // after ensuring that the suspended or muted subjects are being reversed - if (!isSuspended && !isMuted) { - return null - } - - let builder = this.db.db - .selectFrom('moderation_event') - .where('subjectDid', '=', did) - - if (recordPath) { - builder = builder.where('subjectUri', 'like', `%${recordPath}%`) - } - - // Means the subject was suspended and needs to be unsuspended - if (isSuspended) { - builder = builder - .where('action', '=', 'com.atproto.admin.defs#modEventTakedown') - .where('durationInHours', 'is not', null) - } - if (isMuted) { - builder = builder - .where('action', '=', 'com.atproto.admin.defs#modEventMute') - .where('durationInHours', 'is not', null) - } - - return await builder - .orderBy('id', 'desc') - .selectAll() - .limit(1) - .executeTakeFirst() - } - - async getSubjectsDueForReversal(): Promise { - const subjectsDueForReversal = await this.db.db - .selectFrom('moderation_subject_status') - .where('suspendUntil', '<', new Date().toISOString()) - .orWhere('muteUntil', '<', new Date().toISOString()) - .selectAll() - .execute() - - return subjectsDueForReversal - } - - async revertState({ - createdBy, - createdAt, - comment, - action, - subject, - }: ReversibleModerationEvent): Promise<{ - result: ModerationEventRow - restored?: 
TakedownSubjects - }> { - const isRevertingTakedown = - action === 'com.atproto.admin.defs#modEventTakedown' - this.db.assertTransaction() - const result = await this.logEvent({ - event: { - $type: isRevertingTakedown - ? 'com.atproto.admin.defs#modEventReverseTakedown' - : 'com.atproto.admin.defs#modEventUnmute', - comment: comment ?? undefined, - }, - createdAt, - createdBy, - subject, - }) - - let restored: TakedownSubjects | undefined - - if (!isRevertingTakedown) { - return { result, restored } - } - - if ( - result.subjectType === 'com.atproto.admin.defs#repoRef' && - result.subjectDid - ) { - await this.reverseTakedownRepo({ - did: result.subjectDid, - }) - restored = { - did: result.subjectDid, - subjects: [ - { - $type: 'com.atproto.admin.defs#repoRef', - did: result.subjectDid, - }, - ], - } - } - - if ( - result.subjectType === 'com.atproto.repo.strongRef' && - result.subjectUri - ) { - const uri = new AtUri(result.subjectUri) - await this.reverseTakedownRecord({ - uri, - }) - const did = uri.hostname - // TODO: MOD_EVENT This bit needs testing - const subjectStatus = await this.db.db - .selectFrom('moderation_subject_status') - .where('did', '=', uri.host) - .where('recordPath', '=', `${uri.collection}/${uri.rkey}`) - .select('blobCids') - .executeTakeFirst() - const blobCids = subjectStatus?.blobCids || [] - restored = { - did, - subjects: [ - { - $type: 'com.atproto.repo.strongRef', - uri: result.subjectUri, - cid: result.subjectCid ?? 
'', - }, - ...blobCids.map((cid) => ({ - $type: 'com.atproto.admin.defs#repoBlobRef', - did, - cid, - recordUri: result.subjectUri, - })), - ], - } - } - - return { result, restored } - } - - async takedownRepo(info: { - takedownId: number - did: string - }): Promise { - const { takedownId, did } = info - await this.db.db - .updateTable('actor') - .set({ takedownId }) - .where('did', '=', did) - .where('takedownId', 'is', null) - .executeTakeFirst() - - return { - did, - subjects: [ - { - $type: 'com.atproto.admin.defs#repoRef', - did, - }, - ], - } - } - - async reverseTakedownRepo(info: { did: string }) { - await this.db.db - .updateTable('actor') - .set({ takedownId: null }) - .where('did', '=', info.did) - .execute() - } - - async takedownRecord(info: { - takedownId: number - uri: AtUri - cid: CID - blobCids?: CID[] - }): Promise { - const { takedownId, uri, cid, blobCids } = info - const did = uri.hostname - this.db.assertTransaction() - await this.db.db - .updateTable('record') - .set({ takedownId }) - .where('uri', '=', uri.toString()) - .where('takedownId', 'is', null) - .executeTakeFirst() - if (blobCids) { - await Promise.all( - blobCids.map(async (cid) => { - const paths = ImageUriBuilder.presets.map((id) => { - const imgUri = this.imgUriBuilder.getPresetUri(id, uri.host, cid) - return imgUri.replace(this.imgUriBuilder.endpoint, '') - }) - await this.imgInvalidator.invalidate(cid.toString(), paths) - }), - ) - } - return { - did, - subjects: [ - { - $type: 'com.atproto.repo.strongRef', - uri: uri.toString(), - cid: cid.toString(), - }, - ...(blobCids || []).map((cid) => ({ - $type: 'com.atproto.admin.defs#repoBlobRef', - did, - cid: cid.toString(), - recordUri: uri.toString(), - })), - ], - } - } - - async reverseTakedownRecord(info: { uri: AtUri }) { - this.db.assertTransaction() - await this.db.db - .updateTable('record') - .set({ takedownId: null }) - .where('uri', '=', info.uri.toString()) - .execute() - } - - async report(info: { - reasonType: 
NonNullable['reportType'] - reason?: string - subject: { did: string } | { uri: AtUri; cid: CID } - reportedBy: string - createdAt?: Date - }): Promise { - const { - reasonType, - reason, - reportedBy, - createdAt = new Date(), - subject, - } = info - - const event = await this.logEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventReport', - reportType: reasonType, - comment: reason, - }, - createdBy: reportedBy, - subject, - createdAt, - }) - - return event - } - - async getSubjectStatuses({ - cursor, - limit = 50, - takendown, - reviewState, - reviewedAfter, - reviewedBefore, - reportedAfter, - reportedBefore, - includeMuted, - ignoreSubjects, - sortDirection, - lastReviewedBy, - sortField, - subject, - }: { - cursor?: string - limit?: number - takendown?: boolean - reviewedBefore?: string - reviewState?: ModerationSubjectStatusRow['reviewState'] - reviewedAfter?: string - reportedAfter?: string - reportedBefore?: string - includeMuted?: boolean - subject?: string - ignoreSubjects?: string[] - sortDirection: 'asc' | 'desc' - lastReviewedBy?: string - sortField: 'lastReviewedAt' | 'lastReportedAt' - }) { - let builder = this.db.db - .selectFrom('moderation_subject_status') - .leftJoin('actor', 'actor.did', 'moderation_subject_status.did') - - if (subject) { - const subjectInfo = getStatusIdentifierFromSubject(subject) - builder = builder - .where('moderation_subject_status.did', '=', subjectInfo.did) - .where((qb) => - subjectInfo.recordPath - ? 
qb.where('recordPath', '=', subjectInfo.recordPath) - : qb.where('recordPath', '=', ''), - ) - } - - if (ignoreSubjects?.length) { - builder = builder - .where('moderation_subject_status.did', 'not in', ignoreSubjects) - .where('recordPath', 'not in', ignoreSubjects) - } - - if (reviewState) { - builder = builder.where('reviewState', '=', reviewState) - } - - if (lastReviewedBy) { - builder = builder.where('lastReviewedBy', '=', lastReviewedBy) - } - - if (reviewedAfter) { - builder = builder.where('lastReviewedAt', '>', reviewedAfter) - } - - if (reviewedBefore) { - builder = builder.where('lastReviewedAt', '<', reviewedBefore) - } - - if (reportedAfter) { - builder = builder.where('lastReviewedAt', '>', reportedAfter) - } - - if (reportedBefore) { - builder = builder.where('lastReportedAt', '<', reportedBefore) - } - - if (takendown) { - builder = builder.where('takendown', '=', true) - } - - if (!includeMuted) { - builder = builder.where((qb) => - qb - .where('muteUntil', '<', new Date().toISOString()) - .orWhere('muteUntil', 'is', null), - ) - } - - const { ref } = this.db.db.dynamic - const keyset = new StatusKeyset( - ref(`moderation_subject_status.${sortField}`), - ref('moderation_subject_status.id'), - ) - const paginatedBuilder = paginate(builder, { - limit, - cursor, - keyset, - direction: sortDirection, - tryIndex: true, - nullsLast: true, - }) - - const results = await paginatedBuilder - .select('actor.handle as handle') - .selectAll('moderation_subject_status') - .execute() - - return { statuses: results, cursor: keyset.packFromResult(results) } - } - - async isSubjectTakendown( - subject: { did: string } | { uri: AtUri }, - ): Promise { - const { did, recordPath } = getStatusIdentifierFromSubject( - 'did' in subject ? 
subject.did : subject.uri, - ) - let builder = this.db.db - .selectFrom('moderation_subject_status') - .where('did', '=', did) - .where('recordPath', '=', recordPath || '') - - const result = await builder.select('takendown').executeTakeFirst() - - return !!result?.takendown - } -} - -export type TakedownSubjects = { - did: string - subjects: (RepoRef | RepoBlobRef | StrongRef)[] -} diff --git a/packages/bsky/src/services/moderation/pagination.ts b/packages/bsky/src/services/moderation/pagination.ts deleted file mode 100644 index c68de0822d4..00000000000 --- a/packages/bsky/src/services/moderation/pagination.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' -import { DynamicModule, sql } from 'kysely' - -import { Cursor, GenericKeyset } from '../../db/pagination' - -type StatusKeysetParam = { - lastReviewedAt: string | null - lastReportedAt: string | null - id: number -} - -export class StatusKeyset extends GenericKeyset { - labelResult(result: StatusKeysetParam): Cursor - labelResult(result: StatusKeysetParam) { - const primaryField = ( - this.primary as ReturnType - ).dynamicReference.includes('lastReviewedAt') - ? 'lastReviewedAt' - : 'lastReportedAt' - - return { - primary: result[primaryField] - ? new Date(`${result[primaryField]}`).getTime().toString() - : '', - secondary: result.id.toString(), - } - } - labeledResultToCursor(labeled: Cursor) { - return { - primary: labeled.primary, - secondary: labeled.secondary, - } - } - cursorToLabeledResult(cursor: Cursor) { - return { - primary: cursor.primary - ? 
new Date(parseInt(cursor.primary, 10)).toISOString() - : '', - secondary: cursor.secondary, - } - } - unpackCursor(cursorStr?: string): Cursor | undefined { - if (!cursorStr) return - const result = cursorStr.split('::') - const [primary, secondary, ...others] = result - if (!secondary || others.length > 0) { - throw new InvalidRequestError('Malformed cursor') - } - return { - primary, - secondary, - } - } - // This is specifically built to handle nullable columns as primary sorting column - getSql(labeled?: Cursor, direction?: 'asc' | 'desc') { - if (labeled === undefined) return - if (direction === 'asc') { - return !labeled.primary - ? sql`(${this.primary} IS NULL AND ${this.secondary} > ${labeled.secondary})` - : sql`((${this.primary}, ${this.secondary}) > (${labeled.primary}, ${labeled.secondary}) OR (${this.primary} is null))` - } else { - return !labeled.primary - ? sql`(${this.primary} IS NULL AND ${this.secondary} < ${labeled.secondary})` - : sql`((${this.primary}, ${this.secondary}) < (${labeled.primary}, ${labeled.secondary}) OR (${this.primary} is null))` - } - } -} - -type TimeIdKeysetParam = { - id: number - createdAt: string -} -type TimeIdResult = TimeIdKeysetParam - -export class TimeIdKeyset extends GenericKeyset { - labelResult(result: TimeIdResult): Cursor - labelResult(result: TimeIdResult) { - return { primary: result.createdAt, secondary: result.id.toString() } - } - labeledResultToCursor(labeled: Cursor) { - return { - primary: new Date(labeled.primary).getTime().toString(), - secondary: labeled.secondary, - } - } - cursorToLabeledResult(cursor: Cursor) { - const primaryDate = new Date(parseInt(cursor.primary, 10)) - if (isNaN(primaryDate.getTime())) { - throw new InvalidRequestError('Malformed cursor') - } - return { - primary: primaryDate.toISOString(), - secondary: cursor.secondary, - } - } -} diff --git a/packages/bsky/src/services/moderation/status.ts b/packages/bsky/src/services/moderation/status.ts deleted file mode 100644 index 
41fb3873226..00000000000 --- a/packages/bsky/src/services/moderation/status.ts +++ /dev/null @@ -1,244 +0,0 @@ -// This may require better organization but for now, just dumping functions here containing DB queries for moderation status - -import { AtUri } from '@atproto/syntax' -import { PrimaryDatabase } from '../../db' -import { - ModerationEvent, - ModerationSubjectStatus, -} from '../../db/tables/moderation' -import { - REVIEWOPEN, - REVIEWCLOSED, - REVIEWESCALATED, -} from '../../lexicon/types/com/atproto/admin/defs' -import { ModerationEventRow, ModerationSubjectStatusRow } from './types' -import { HOUR } from '@atproto/common' -import { CID } from 'multiformats/cid' -import { sql } from 'kysely' - -const getSubjectStatusForModerationEvent = ({ - action, - createdBy, - createdAt, - durationInHours, -}: { - action: string - createdBy: string - createdAt: string - durationInHours: number | null -}): Partial | null => { - switch (action) { - case 'com.atproto.admin.defs#modEventAcknowledge': - return { - lastReviewedBy: createdBy, - reviewState: REVIEWCLOSED, - lastReviewedAt: createdAt, - } - case 'com.atproto.admin.defs#modEventReport': - return { - reviewState: REVIEWOPEN, - lastReportedAt: createdAt, - } - case 'com.atproto.admin.defs#modEventEscalate': - return { - lastReviewedBy: createdBy, - reviewState: REVIEWESCALATED, - lastReviewedAt: createdAt, - } - case 'com.atproto.admin.defs#modEventReverseTakedown': - return { - lastReviewedBy: createdBy, - reviewState: REVIEWCLOSED, - takendown: false, - suspendUntil: null, - lastReviewedAt: createdAt, - } - case 'com.atproto.admin.defs#modEventUnmute': - return { - lastReviewedBy: createdBy, - muteUntil: null, - reviewState: REVIEWOPEN, - lastReviewedAt: createdAt, - } - case 'com.atproto.admin.defs#modEventTakedown': - return { - takendown: true, - lastReviewedBy: createdBy, - reviewState: REVIEWCLOSED, - lastReviewedAt: createdAt, - suspendUntil: durationInHours - ? 
new Date(Date.now() + durationInHours * HOUR).toISOString() - : null, - } - case 'com.atproto.admin.defs#modEventMute': - return { - lastReviewedBy: createdBy, - reviewState: REVIEWOPEN, - lastReviewedAt: createdAt, - // By default, mute for 24hrs - muteUntil: new Date( - Date.now() + (durationInHours || 24) * HOUR, - ).toISOString(), - } - case 'com.atproto.admin.defs#modEventComment': - return { - lastReviewedBy: createdBy, - lastReviewedAt: createdAt, - } - default: - return null - } -} - -// Based on a given moderation action event, this function will update the moderation status of the subject -// If there's no existing status, it will create one -// If the action event does not affect the status, it will do nothing -export const adjustModerationSubjectStatus = async ( - db: PrimaryDatabase, - moderationEvent: ModerationEventRow, - blobCids?: CID[], -) => { - const { - action, - subjectDid, - subjectUri, - subjectCid, - createdBy, - meta, - comment, - createdAt, - } = moderationEvent - - const subjectStatus = getSubjectStatusForModerationEvent({ - action, - createdBy, - createdAt, - durationInHours: moderationEvent.durationInHours, - }) - - // If there are no subjectStatus that means there are no side-effect of the incoming event - if (!subjectStatus) { - return null - } - - const now = new Date().toISOString() - // If subjectUri exists, it's not a repoRef so pass along the uri to get identifier back - const identifier = getStatusIdentifierFromSubject(subjectUri || subjectDid) - - db.assertTransaction() - - const currentStatus = await db.db - .selectFrom('moderation_subject_status') - .where('did', '=', identifier.did) - .where('recordPath', '=', identifier.recordPath) - .selectAll() - .executeTakeFirst() - - if ( - currentStatus?.reviewState === REVIEWESCALATED && - subjectStatus.reviewState === REVIEWOPEN - ) { - // If the current status is escalated and the incoming event is to open the review - // We want to keep the status as escalated - 
subjectStatus.reviewState = REVIEWESCALATED - } - - // Set these because we don't want to override them if they're already set - const defaultData = { - comment: null, - // Defaulting reviewState to open for any event may not be the desired behavior. - // For instance, if a subject never had any event and we just want to leave a comment to keep an eye on it - // that shouldn't mean we want to review the subject - reviewState: REVIEWOPEN, - recordCid: subjectCid || null, - } - const newStatus = { - ...defaultData, - ...subjectStatus, - } - - if ( - action === 'com.atproto.admin.defs#modEventReverseTakedown' && - !subjectStatus.takendown - ) { - newStatus.takendown = false - subjectStatus.takendown = false - } - - if (action === 'com.atproto.admin.defs#modEventComment' && meta?.sticky) { - newStatus.comment = comment - subjectStatus.comment = comment - } - - if (blobCids?.length) { - const newBlobCids = sql`${JSON.stringify( - blobCids.map((c) => c.toString()), - )}` as unknown as ModerationSubjectStatusRow['blobCids'] - newStatus.blobCids = newBlobCids - subjectStatus.blobCids = newBlobCids - } - - const insertQuery = db.db - .insertInto('moderation_subject_status') - .values({ - ...identifier, - ...newStatus, - createdAt: now, - updatedAt: now, - // TODO: Need to get the types right here. - } as ModerationSubjectStatusRow) - .onConflict((oc) => - oc.constraint('moderation_status_unique_idx').doUpdateSet({ - ...subjectStatus, - updatedAt: now, - }), - ) - - const status = await insertQuery.executeTakeFirst() - return status -} - -type ModerationSubjectStatusFilter = - | Pick - | Pick - | Pick -export const getModerationSubjectStatus = async ( - db: PrimaryDatabase, - filters: ModerationSubjectStatusFilter, -) => { - let builder = db.db - .selectFrom('moderation_subject_status') - // DID will always be passed at the very least - .where('did', '=', filters.did) - .where('recordPath', '=', 'recordPath' in filters ? 
filters.recordPath : '') - - if ('recordCid' in filters) { - builder = builder.where('recordCid', '=', filters.recordCid) - } else { - builder = builder.where('recordCid', 'is', null) - } - - return builder.executeTakeFirst() -} - -export const getStatusIdentifierFromSubject = ( - subject: string | AtUri, -): { did: string; recordPath: string } => { - const isSubjectString = typeof subject === 'string' - if (isSubjectString && subject.startsWith('did:')) { - return { - did: subject, - recordPath: '', - } - } - - if (isSubjectString && !subject.startsWith('at://')) { - throw new Error('Subject is neither a did nor an at-uri') - } - - const uri = isSubjectString ? new AtUri(subject) : subject - return { - did: uri.host, - recordPath: `${uri.collection}/${uri.rkey}`, - } -} diff --git a/packages/bsky/src/services/moderation/types.ts b/packages/bsky/src/services/moderation/types.ts deleted file mode 100644 index 77a8baf71ff..00000000000 --- a/packages/bsky/src/services/moderation/types.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { Selectable } from 'kysely' -import { - ModerationEvent, - ModerationSubjectStatus, -} from '../../db/tables/moderation' -import { AtUri } from '@atproto/syntax' -import { CID } from 'multiformats/cid' -import { ComAtprotoAdminDefs } from '@atproto/api' - -export type SubjectInfo = - | { - subjectType: 'com.atproto.admin.defs#repoRef' - subjectDid: string - subjectUri: null - subjectCid: null - } - | { - subjectType: 'com.atproto.repo.strongRef' - subjectDid: string - subjectUri: string - subjectCid: string - } - -export type ModerationEventRow = Selectable -export type ReversibleModerationEvent = Pick< - ModerationEventRow, - 'createdBy' | 'comment' | 'action' -> & { - createdAt?: Date - subject: { did: string } | { uri: AtUri; cid: CID } -} - -export type ModerationEventRowWithHandle = ModerationEventRow & { - subjectHandle?: string | null - creatorHandle?: string | null -} -export type ModerationSubjectStatusRow = Selectable -export type 
ModerationSubjectStatusRowWithHandle = - ModerationSubjectStatusRow & { handle: string | null } - -export type ModEventType = - | ComAtprotoAdminDefs.ModEventTakedown - | ComAtprotoAdminDefs.ModEventAcknowledge - | ComAtprotoAdminDefs.ModEventEscalate - | ComAtprotoAdminDefs.ModEventComment - | ComAtprotoAdminDefs.ModEventLabel - | ComAtprotoAdminDefs.ModEventReport - | ComAtprotoAdminDefs.ModEventMute - | ComAtprotoAdminDefs.ModEventReverseTakedown diff --git a/packages/bsky/src/services/moderation/views.ts b/packages/bsky/src/services/moderation/views.ts deleted file mode 100644 index 2dc9c5ec7e4..00000000000 --- a/packages/bsky/src/services/moderation/views.ts +++ /dev/null @@ -1,549 +0,0 @@ -import { sql } from 'kysely' -import { ArrayEl } from '@atproto/common' -import { AtUri } from '@atproto/syntax' -import { INVALID_HANDLE } from '@atproto/syntax' -import { BlobRef, jsonStringToLex } from '@atproto/lexicon' -import { Database } from '../../db' -import { Actor } from '../../db/tables/actor' -import { Record as RecordRow } from '../../db/tables/record' -import { - ModEventView, - RepoView, - RepoViewDetail, - RecordView, - RecordViewDetail, - ReportViewDetail, - BlobView, - SubjectStatusView, - ModEventViewDetail, -} from '../../lexicon/types/com/atproto/admin/defs' -import { OutputSchema as ReportOutput } from '../../lexicon/types/com/atproto/moderation/createReport' -import { Label } from '../../lexicon/types/com/atproto/label/defs' -import { - ModerationEventRowWithHandle, - ModerationSubjectStatusRowWithHandle, -} from './types' -import { getSelfLabels } from '../label' -import { REASONOTHER } from '../../lexicon/types/com/atproto/moderation/defs' - -export class ModerationViews { - constructor(private db: Database) {} - - repo(result: RepoResult): Promise - repo(result: RepoResult[]): Promise - async repo( - result: RepoResult | RepoResult[], - ): Promise { - const results = Array.isArray(result) ? 
result : [result] - if (results.length === 0) return [] - - const [info, subjectStatuses] = await Promise.all([ - await this.db.db - .selectFrom('actor') - .leftJoin('profile', 'profile.creator', 'actor.did') - .leftJoin( - 'record as profile_record', - 'profile_record.uri', - 'profile.uri', - ) - .where( - 'actor.did', - 'in', - results.map((r) => r.did), - ) - .select(['actor.did as did', 'profile_record.json as profileJson']) - .execute(), - this.getSubjectStatus(results.map((r) => ({ did: r.did }))), - ]) - - const infoByDid = info.reduce( - (acc, cur) => Object.assign(acc, { [cur.did]: cur }), - {} as Record>, - ) - const subjectStatusByDid = subjectStatuses.reduce( - (acc, cur) => - Object.assign(acc, { [cur.did ?? '']: this.subjectStatus(cur) }), - {}, - ) - - const views = results.map((r) => { - const { profileJson } = infoByDid[r.did] ?? {} - const relatedRecords: object[] = [] - if (profileJson) { - relatedRecords.push( - jsonStringToLex(profileJson) as Record, - ) - } - return { - // No email or invite info on appview - did: r.did, - handle: r.handle ?? INVALID_HANDLE, - relatedRecords, - indexedAt: r.indexedAt, - moderation: { - subjectStatus: subjectStatusByDid[r.did] ?? undefined, - }, - } - }) - - return Array.isArray(result) ? views : views[0] - } - event(result: EventResult): Promise - event(result: EventResult[]): Promise - async event( - result: EventResult | EventResult[], - ): Promise { - const results = Array.isArray(result) ? result : [result] - if (results.length === 0) return [] - - const views = results.map((res) => { - const eventView: ModEventView = { - id: res.id, - event: { - $type: res.action, - comment: res.comment ?? undefined, - }, - subject: - res.subjectType === 'com.atproto.admin.defs#repoRef' - ? 
{ - $type: 'com.atproto.admin.defs#repoRef', - did: res.subjectDid, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: res.subjectUri, - cid: res.subjectCid, - }, - subjectBlobCids: [], - createdBy: res.createdBy, - createdAt: res.createdAt, - subjectHandle: res.subjectHandle ?? undefined, - creatorHandle: res.creatorHandle ?? undefined, - } - - if ( - [ - 'com.atproto.admin.defs#modEventTakedown', - 'com.atproto.admin.defs#modEventMute', - ].includes(res.action) - ) { - eventView.event = { - ...eventView.event, - durationInHours: res.durationInHours ?? undefined, - } - } - - if (res.action === 'com.atproto.admin.defs#modEventLabel') { - eventView.event = { - ...eventView.event, - createLabelVals: res.createLabelVals?.length - ? res.createLabelVals.split(' ') - : [], - negateLabelVals: res.negateLabelVals?.length - ? res.negateLabelVals.split(' ') - : [], - } - } - - // This is for legacy data only, for new events, these types of events won't have labels attached - if ( - [ - 'com.atproto.admin.defs#modEventAcknowledge', - 'com.atproto.admin.defs#modEventTakedown', - 'com.atproto.admin.defs#modEventEscalate', - ].includes(res.action) - ) { - if (res.createLabelVals?.length) { - eventView.event = { - ...eventView.event, - createLabelVals: res.createLabelVals.split(' '), - } - } - - if (res.negateLabelVals?.length) { - eventView.event = { - ...eventView.event, - negateLabelVals: res.negateLabelVals.split(' '), - } - } - } - - if (res.action === 'com.atproto.admin.defs#modEventReport') { - eventView.event = { - ...eventView.event, - reportType: res.meta?.reportType ?? undefined, - } - } - - if (res.action === 'com.atproto.admin.defs#modEventEmail') { - eventView.event = { - ...eventView.event, - subjectLine: res.meta?.subjectLine ?? '', - } - } - - if ( - res.action === 'com.atproto.admin.defs#modEventComment' && - res.meta?.sticky - ) { - eventView.event.sticky = true - } - - return eventView - }) - - return Array.isArray(result) ? 
views : views[0] - } - - async eventDetail(result: EventResult): Promise { - const [event, subject] = await Promise.all([ - this.event(result), - this.subject(result), - ]) - const allBlobs = findBlobRefs(subject.value) - const subjectBlobs = await this.blob( - allBlobs.filter((blob) => - event.subjectBlobCids.includes(blob.ref.toString()), - ), - ) - return { - ...event, - subject, - subjectBlobs, - } - } - - async repoDetail(result: RepoResult): Promise { - const [repo, labels] = await Promise.all([ - this.repo(result), - this.labels(result.did), - ]) - - return { - ...repo, - moderation: { - ...repo.moderation, - }, - labels, - } - } - - record(result: RecordResult): Promise - record(result: RecordResult[]): Promise - async record( - result: RecordResult | RecordResult[], - ): Promise { - const results = Array.isArray(result) ? result : [result] - if (results.length === 0) return [] - - const [repoResults, subjectStatuses] = await Promise.all([ - this.db.db - .selectFrom('actor') - .where( - 'actor.did', - 'in', - results.map((r) => didFromUri(r.uri)), - ) - .selectAll() - .execute(), - this.getSubjectStatus(results.map((r) => didAndRecordPathFromUri(r.uri))), - ]) - const repos = await this.repo(repoResults) - - const reposByDid = repos.reduce( - (acc, cur) => Object.assign(acc, { [cur.did]: cur }), - {} as Record>, - ) - const subjectStatusByUri = subjectStatuses.reduce( - (acc, cur) => - Object.assign(acc, { - [`${cur.did}/${cur.recordPath}` ?? 
'']: this.subjectStatus(cur), - }), - {}, - ) - - const views = results.map((res) => { - const repo = reposByDid[didFromUri(res.uri)] - const { did, recordPath } = didAndRecordPathFromUri(res.uri) - const subjectStatus = subjectStatusByUri[`${did}/${recordPath}`] - if (!repo) throw new Error(`Record repo is missing: ${res.uri}`) - const value = jsonStringToLex(res.json) as Record - return { - uri: res.uri, - cid: res.cid, - value, - blobCids: findBlobRefs(value).map((blob) => blob.ref.toString()), - indexedAt: res.indexedAt, - repo, - moderation: { - subjectStatus, - }, - } - }) - - return Array.isArray(result) ? views : views[0] - } - - async recordDetail(result: RecordResult): Promise { - const [record, subjectStatusResult] = await Promise.all([ - this.record(result), - this.getSubjectStatus(didAndRecordPathFromUri(result.uri)), - ]) - - const [blobs, labels, subjectStatus] = await Promise.all([ - this.blob(findBlobRefs(record.value)), - this.labels(record.uri), - subjectStatusResult?.length - ? this.subjectStatus(subjectStatusResult[0]) - : Promise.resolve(undefined), - ]) - const selfLabels = getSelfLabels({ - uri: result.uri, - cid: result.cid, - record: jsonStringToLex(result.json) as Record, - }) - return { - ...record, - blobs, - moderation: { - ...record.moderation, - subjectStatus, - }, - labels: [...labels, ...selfLabels], - } - } - reportPublic(report: ReportResult): ReportOutput { - return { - id: report.id, - createdAt: report.createdAt, - // Ideally, we would never have a report entry that does not have a reasonType but at the schema level - // we are not guarantying that so in whatever case, if we end up with such entries, default to 'other' - reasonType: report.meta?.reportType - ? (report.meta?.reportType as string) - : REASONOTHER, - reason: report.comment ?? undefined, - reportedBy: report.createdBy, - subject: - report.subjectType === 'com.atproto.admin.defs#repoRef' - ? 
{ - $type: 'com.atproto.admin.defs#repoRef', - did: report.subjectDid, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: report.subjectUri, - cid: report.subjectCid, - }, - } - } - // Partial view for subjects - - async subject(result: SubjectResult): Promise { - let subject: SubjectView - if (result.subjectType === 'com.atproto.admin.defs#repoRef') { - const repoResult = await this.db.db - .selectFrom('actor') - .selectAll() - .where('did', '=', result.subjectDid) - .executeTakeFirst() - if (repoResult) { - subject = await this.repo(repoResult) - subject.$type = 'com.atproto.admin.defs#repoView' - } else { - subject = { did: result.subjectDid } - subject.$type = 'com.atproto.admin.defs#repoViewNotFound' - } - } else if ( - result.subjectType === 'com.atproto.repo.strongRef' && - result.subjectUri !== null - ) { - const recordResult = await this.db.db - .selectFrom('record') - .selectAll() - .where('uri', '=', result.subjectUri) - .executeTakeFirst() - if (recordResult) { - subject = await this.record(recordResult) - subject.$type = 'com.atproto.admin.defs#recordView' - } else { - subject = { uri: result.subjectUri } - subject.$type = 'com.atproto.admin.defs#recordViewNotFound' - } - } else { - throw new Error(`Bad subject data: (${result.id}) ${result.subjectType}`) - } - return subject - } - - // Partial view for blobs - - async blob(blobs: BlobRef[]): Promise { - if (!blobs.length) return [] - const { ref } = this.db.db.dynamic - const modStatusResults = await this.db.db - .selectFrom('moderation_subject_status') - .where( - sql`${ref( - 'moderation_subject_status.blobCids', - )} @> ${JSON.stringify(blobs.map((blob) => blob.ref.toString()))}`, - ) - .selectAll() - .executeTakeFirst() - const statusByCid = (modStatusResults?.blobCids || [])?.reduce( - (acc, cur) => Object.assign(acc, { [cur]: modStatusResults }), - {}, - ) - // Intentionally missing details field, since we don't have any on appview. 
- // We also don't know when the blob was created, so we use a canned creation time. - const unknownTime = new Date(0).toISOString() - return blobs.map((blob) => { - const cid = blob.ref.toString() - const subjectStatus = statusByCid[cid] - ? this.subjectStatus(statusByCid[cid]) - : undefined - return { - cid, - mimeType: blob.mimeType, - size: blob.size, - createdAt: unknownTime, - moderation: { - subjectStatus, - }, - } - }) - } - - async labels(subject: string, includeNeg?: boolean): Promise { - const res = await this.db.db - .selectFrom('label') - .where('label.uri', '=', subject) - .if(!includeNeg, (qb) => qb.where('neg', '=', false)) - .selectAll() - .execute() - return res.map((l) => ({ - ...l, - cid: l.cid === '' ? undefined : l.cid, - neg: l.neg, - })) - } - - async getSubjectStatus( - subject: - | { did: string; recordPath?: string } - | { did: string; recordPath?: string }[], - ): Promise { - const subjectFilters = Array.isArray(subject) ? subject : [subject] - const filterForSubject = - ({ did, recordPath }: { did: string; recordPath?: string }) => - // TODO: Fix the typing here? 
- (clause: any) => { - clause = clause - .where('moderation_subject_status.did', '=', did) - .where('moderation_subject_status.recordPath', '=', recordPath || '') - return clause - } - - const builder = this.db.db - .selectFrom('moderation_subject_status') - .leftJoin('actor', 'actor.did', 'moderation_subject_status.did') - .where((clause) => { - subjectFilters.forEach(({ did, recordPath }, i) => { - const applySubjectFilter = filterForSubject({ did, recordPath }) - if (i === 0) { - clause = clause.where(applySubjectFilter) - } else { - clause = clause.orWhere(applySubjectFilter) - } - }) - - return clause - }) - .selectAll('moderation_subject_status') - .select('actor.handle as handle') - - return builder.execute() - } - - subjectStatus(result: ModerationSubjectStatusRowWithHandle): SubjectStatusView - subjectStatus( - result: ModerationSubjectStatusRowWithHandle[], - ): SubjectStatusView[] - subjectStatus( - result: - | ModerationSubjectStatusRowWithHandle - | ModerationSubjectStatusRowWithHandle[], - ): SubjectStatusView | SubjectStatusView[] { - const results = Array.isArray(result) ? result : [result] - if (results.length === 0) return [] - - const decoratedSubjectStatuses = results.map((subjectStatus) => ({ - id: subjectStatus.id, - reviewState: subjectStatus.reviewState, - createdAt: subjectStatus.createdAt, - updatedAt: subjectStatus.updatedAt, - comment: subjectStatus.comment ?? undefined, - lastReviewedBy: subjectStatus.lastReviewedBy ?? undefined, - lastReviewedAt: subjectStatus.lastReviewedAt ?? undefined, - lastReportedAt: subjectStatus.lastReportedAt ?? undefined, - muteUntil: subjectStatus.muteUntil ?? undefined, - suspendUntil: subjectStatus.suspendUntil ?? undefined, - takendown: subjectStatus.takendown ?? undefined, - subjectRepoHandle: subjectStatus.handle ?? undefined, - subjectBlobCids: subjectStatus.blobCids || [], - subject: !subjectStatus.recordPath - ? 
{ - $type: 'com.atproto.admin.defs#repoRef', - did: subjectStatus.did, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: AtUri.make( - subjectStatus.did, - // Not too intuitive but the recordpath is basically / - // which is what the last 2 params of .make() arguments are - ...subjectStatus.recordPath.split('/'), - ).toString(), - cid: subjectStatus.recordCid, - }, - })) - - return Array.isArray(result) - ? decoratedSubjectStatuses - : decoratedSubjectStatuses[0] - } -} - -type RepoResult = Actor - -type EventResult = ModerationEventRowWithHandle - -type ReportResult = ModerationEventRowWithHandle - -type RecordResult = RecordRow - -type SubjectResult = Pick< - EventResult & ReportResult, - 'id' | 'subjectType' | 'subjectDid' | 'subjectUri' | 'subjectCid' -> - -type SubjectView = ModEventViewDetail['subject'] & ReportViewDetail['subject'] - -function didFromUri(uri: string) { - return new AtUri(uri).host -} - -function didAndRecordPathFromUri(uri: string) { - const atUri = new AtUri(uri) - return { did: atUri.host, recordPath: `${atUri.collection}/${atUri.rkey}` } -} - -function findBlobRefs(value: unknown, refs: BlobRef[] = []) { - if (value instanceof BlobRef) { - refs.push(value) - } else if (Array.isArray(value)) { - value.forEach((val) => findBlobRefs(val, refs)) - } else if (value && typeof value === 'object') { - Object.values(value).forEach((val) => findBlobRefs(val, refs)) - } - return refs -} diff --git a/services/bsky/daemon.js b/services/bsky/daemon.js deleted file mode 100644 index bd8322ab58f..00000000000 --- a/services/bsky/daemon.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict' /* eslint-disable */ - -require('dd-trace/init') // Only works with commonjs - -// Tracer code above must come before anything else -const { PrimaryDatabase, DaemonConfig, BskyDaemon } = require('@atproto/bsky') - -const main = async () => { - const env = getEnv() - const db = new PrimaryDatabase({ - url: env.dbPostgresUrl, - schema: env.dbPostgresSchema, - poolSize: 
env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - }) - const cfg = DaemonConfig.readEnv({ - version: env.version, - dbPostgresUrl: env.dbPostgresUrl, - dbPostgresSchema: env.dbPostgresSchema, - }) - const daemon = BskyDaemon.create({ db, cfg }) - await daemon.start() - process.on('SIGTERM', async () => { - await daemon.destroy() - }) -} - -const getEnv = () => ({ - version: process.env.BSKY_VERSION, - dbPostgresUrl: - process.env.DB_PRIMARY_POSTGRES_URL || process.env.DB_POSTGRES_URL, - dbPostgresSchema: process.env.DB_POSTGRES_SCHEMA || undefined, - dbPoolSize: maybeParseInt(process.env.DB_POOL_SIZE), - dbPoolMaxUses: maybeParseInt(process.env.DB_POOL_MAX_USES), - dbPoolIdleTimeoutMs: maybeParseInt(process.env.DB_POOL_IDLE_TIMEOUT_MS), -}) - -const maybeParseInt = (str) => { - const parsed = parseInt(str) - return isNaN(parsed) ? undefined : parsed -} - -main() diff --git a/services/bsky/indexer.js b/services/bsky/indexer.js deleted file mode 100644 index beac2c114d7..00000000000 --- a/services/bsky/indexer.js +++ /dev/null @@ -1,110 +0,0 @@ -'use strict' /* eslint-disable */ - -require('dd-trace/init') // Only works with commonjs - -// Tracer code above must come before anything else -const { CloudfrontInvalidator, BunnyInvalidator } = require('@atproto/aws') -const { - IndexerConfig, - BskyIndexer, - Redis, - PrimaryDatabase, -} = require('@atproto/bsky') - -const main = async () => { - const env = getEnv() - const db = new PrimaryDatabase({ - url: env.dbPostgresUrl, - schema: env.dbPostgresSchema, - poolSize: env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - }) - const cfg = IndexerConfig.readEnv({ - version: env.version, - dbPostgresUrl: env.dbPostgresUrl, - dbPostgresSchema: env.dbPostgresSchema, - }) - - // configure zero, one, or both image invalidators - let imgInvalidator - const bunnyInvalidator = env.bunnyAccessKey - ? 
new BunnyInvalidator({ - accessKey: env.bunnyAccessKey, - urlPrefix: cfg.imgUriEndpoint, - }) - : undefined - const cfInvalidator = env.cfDistributionId - ? new CloudfrontInvalidator({ - distributionId: env.cfDistributionId, - pathPrefix: cfg.imgUriEndpoint && new URL(cfg.imgUriEndpoint).pathname, - }) - : undefined - if (bunnyInvalidator && imgInvalidator) { - imgInvalidator = new MultiImageInvalidator([ - bunnyInvalidator, - imgInvalidator, - ]) - } else if (bunnyInvalidator) { - imgInvalidator = bunnyInvalidator - } else if (cfInvalidator) { - imgInvalidator = cfInvalidator - } - - const redis = new Redis( - cfg.redisSentinelName - ? { - sentinel: cfg.redisSentinelName, - hosts: cfg.redisSentinelHosts, - password: cfg.redisPassword, - } - : { - host: cfg.redisHost, - password: cfg.redisPassword, - }, - ) - const indexer = BskyIndexer.create({ - db, - redis, - cfg, - imgInvalidator, - }) - await indexer.start() - process.on('SIGTERM', async () => { - await indexer.destroy() - }) -} - -// Also accepts the following in readEnv(): -// - REDIS_HOST -// - REDIS_SENTINEL_NAME -// - REDIS_SENTINEL_HOSTS -// - REDIS_PASSWORD -// - DID_PLC_URL -// - DID_CACHE_STALE_TTL -// - DID_CACHE_MAX_TTL -// - LABELER_DID -// - HIVE_API_KEY -// - INDEXER_PARTITION_IDS -// - INDEXER_PARTITION_BATCH_SIZE -// - INDEXER_CONCURRENCY -// - INDEXER_SUB_LOCK_ID -const getEnv = () => ({ - version: process.env.BSKY_VERSION, - dbPostgresUrl: - process.env.DB_PRIMARY_POSTGRES_URL || process.env.DB_POSTGRES_URL, - dbPostgresSchema: process.env.DB_POSTGRES_SCHEMA || undefined, - dbPoolSize: maybeParseInt(process.env.DB_POOL_SIZE), - dbPoolMaxUses: maybeParseInt(process.env.DB_POOL_MAX_USES), - dbPoolIdleTimeoutMs: maybeParseInt(process.env.DB_POOL_IDLE_TIMEOUT_MS), - bunnyAccessKey: process.env.BUNNY_ACCESS_KEY, - cfDistributionId: process.env.CF_DISTRIBUTION_ID, - imgUriEndpoint: process.env.IMG_URI_ENDPOINT, -}) - -const maybeParseInt = (str) => { - const parsed = parseInt(str) - return 
isNaN(parsed) ? undefined : parsed -} - -main() diff --git a/services/bsky/ingester.js b/services/bsky/ingester.js deleted file mode 100644 index 19c33ea1067..00000000000 --- a/services/bsky/ingester.js +++ /dev/null @@ -1,75 +0,0 @@ -'use strict' /* eslint-disable */ - -require('dd-trace/init') // Only works with commonjs - -// Tracer code above must come before anything else -const { - PrimaryDatabase, - IngesterConfig, - BskyIngester, - Redis, -} = require('@atproto/bsky') - -const main = async () => { - const env = getEnv() - // No migration: ingester only uses pg for a lock - const db = new PrimaryDatabase({ - url: env.dbPostgresUrl, - schema: env.dbPostgresSchema, - poolSize: env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - }) - const cfg = IngesterConfig.readEnv({ - version: env.version, - dbPostgresUrl: env.dbPostgresUrl, - dbPostgresSchema: env.dbPostgresSchema, - repoProvider: env.repoProvider, - ingesterSubLockId: env.subLockId, - }) - const redis = new Redis( - cfg.redisSentinelName - ? 
{ - sentinel: cfg.redisSentinelName, - hosts: cfg.redisSentinelHosts, - password: cfg.redisPassword, - } - : { - host: cfg.redisHost, - password: cfg.redisPassword, - }, - ) - const ingester = BskyIngester.create({ db, redis, cfg }) - await ingester.start() - process.on('SIGTERM', async () => { - await ingester.destroy() - }) -} - -// Also accepts the following in readEnv(): -// - REDIS_HOST -// - REDIS_SENTINEL_NAME -// - REDIS_SENTINEL_HOSTS -// - REDIS_PASSWORD -// - REPO_PROVIDER -// - INGESTER_PARTITION_COUNT -// - INGESTER_MAX_ITEMS -// - INGESTER_CHECK_ITEMS_EVERY_N -// - INGESTER_INITIAL_CURSOR -// - INGESTER_SUB_LOCK_ID -const getEnv = () => ({ - version: process.env.BSKY_VERSION, - dbPostgresUrl: - process.env.DB_PRIMARY_POSTGRES_URL || process.env.DB_POSTGRES_URL, - dbPostgresSchema: process.env.DB_POSTGRES_SCHEMA || undefined, - dbPoolSize: maybeParseInt(process.env.DB_POOL_SIZE), - dbPoolMaxUses: maybeParseInt(process.env.DB_POOL_MAX_USES), - dbPoolIdleTimeoutMs: maybeParseInt(process.env.DB_POOL_IDLE_TIMEOUT_MS), -}) - -const maybeParseInt = (str) => { - const parsed = parseInt(str) - return isNaN(parsed) ? 
undefined : parsed -} - -main() From 61a03898591ac07f3aa74fe0ee05b5f60ca34ee3 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Wed, 20 Dec 2023 14:59:10 -0500 Subject: [PATCH 02/17] convert more bsky internals to dataplane, remove custom feedgens, implement mute/unmuting in mock dataplane --- packages/bsky/proto/bsky.proto | 36 +++ .../src/api/app/bsky/actor/getProfiles.ts | 4 +- .../src/api/app/bsky/feed/getPostThread.ts | 217 ------------- .../bsky/src/api/app/bsky/graph/muteActor.ts | 18 +- .../src/api/app/bsky/graph/muteActorList.ts | 19 +- .../src/api/app/bsky/graph/unmuteActor.ts | 18 +- .../src/api/app/bsky/graph/unmuteActorList.ts | 9 +- packages/bsky/src/api/blob-resolver.ts | 27 +- .../api/com/atproto/identity/resolveHandle.ts | 9 +- .../src/api/com/atproto/repo/getRecord.ts | 49 ++- .../src/api/com/atproto/temp/fetchLabels.ts | 29 +- packages/bsky/src/api/health.ts | 4 +- packages/bsky/src/api/index.ts | 16 - .../bsky/src/data-plane/gen/bsky_connect.ts | 38 ++- packages/bsky/src/data-plane/gen/bsky_pb.ts | 304 ++++++++++++++++++ .../src/data-plane/server/routes/mutes.ts | 54 ++++ packages/bsky/src/feed-gen/bsky-team.ts | 45 --- packages/bsky/src/feed-gen/hot-classic.ts | 59 ---- packages/bsky/src/feed-gen/index.ts | 15 +- packages/bsky/src/feed-gen/mutuals.ts | 61 ---- packages/bsky/src/services/index.ts | 9 +- 21 files changed, 492 insertions(+), 548 deletions(-) delete mode 100644 packages/bsky/src/feed-gen/bsky-team.ts delete mode 100644 packages/bsky/src/feed-gen/hot-classic.ts delete mode 100644 packages/bsky/src/feed-gen/mutuals.ts diff --git a/packages/bsky/proto/bsky.proto b/packages/bsky/proto/bsky.proto index db89d87ecf1..d7f0e6fed98 100644 --- a/packages/bsky/proto/bsky.proto +++ b/packages/bsky/proto/bsky.proto @@ -455,6 +455,22 @@ message GetMutesResponse { string cursor = 2; } +// mute another user +message MuteActorRequest { + string actor_did = 1; + string subject_did = 2; +} + +message MuteActorResponse {} + +// unmute another user 
+message UnmuteActorRequest { + string actor_did = 1; + string subject_did = 2; +} + +message UnmuteActorResponse {} + // // Mutelists // @@ -495,6 +511,22 @@ message GetMutelistSubscriptionsResponse { string cursor = 2; } +// mute a list +message MuteActorListRequest { + string actor_did = 1; + string list_uri = 2; +} + +message MuteActorListResponse {} + +// unmute a list +message UnmuteActorListRequest { + string actor_did = 1; + string list_uri = 2; +} + +message UnmuteActorListResponse {} + // // Blocks // @@ -905,11 +937,15 @@ service Service { // Mutes rpc GetActorMutesActor(GetActorMutesActorRequest) returns (GetActorMutesActorResponse); rpc GetMutes(GetMutesRequest) returns (GetMutesResponse); + rpc MuteActor(MuteActorRequest) returns (MuteActorResponse); + rpc UnmuteActor(UnmuteActorRequest) returns (UnmuteActorResponse); // Mutelists rpc GetActorMutesActorViaList(GetActorMutesActorViaListRequest) returns (GetActorMutesActorViaListResponse); rpc GetMutelistSubscription(GetMutelistSubscriptionRequest) returns (GetMutelistSubscriptionResponse); rpc GetMutelistSubscriptions(GetMutelistSubscriptionsRequest) returns (GetMutelistSubscriptionsResponse); + rpc MuteActorList(MuteActorListRequest) returns (MuteActorListResponse); + rpc UnmuteActorList(UnmuteActorListRequest) returns (UnmuteActorListResponse); // Blocks rpc GetBidirectionalBlock(GetBidirectionalBlockRequest) returns (GetBidirectionalBlockResponse); diff --git a/packages/bsky/src/api/app/bsky/actor/getProfiles.ts b/packages/bsky/src/api/app/bsky/actor/getProfiles.ts index 1a754443e3b..cfdf924c761 100644 --- a/packages/bsky/src/api/app/bsky/actor/getProfiles.ts +++ b/packages/bsky/src/api/app/bsky/actor/getProfiles.ts @@ -12,13 +12,11 @@ export default function (server: Server, ctx: AppContext) { server.app.bsky.actor.getProfiles({ auth: ctx.authOptionalVerifier, handler: async ({ auth, params, res }) => { - const db = ctx.db.getReplica() - const actorService = ctx.services.actor(db) const viewer = 
auth.credentials.did const [result, repoRev] = await Promise.all([ getProfile({ ...params, viewer }, ctx), - actorService.getRepoRev(viewer), + ctx.hydrator.actor.getRepoRevSafe(viewer), ]) setRepoRev(res, repoRev) diff --git a/packages/bsky/src/api/app/bsky/feed/getPostThread.ts b/packages/bsky/src/api/app/bsky/feed/getPostThread.ts index d1ba6619a36..92e1c7c79f9 100644 --- a/packages/bsky/src/api/app/bsky/feed/getPostThread.ts +++ b/packages/bsky/src/api/app/bsky/feed/getPostThread.ts @@ -82,223 +82,6 @@ const presentation = ( return { thread } } -// @TODO tidy -// const composeThread = ( -// threadData: PostThread, -// actors: ActorInfoMap, -// state: HydrationState, -// ctx: Context, -// viewer: string | null, -// ) => { -// const { feedService } = ctx -// const { posts, threadgates, embeds, blocks, labels, lists } = state - -// const post = feedService.views.formatPostView( -// threadData.post.postUri, -// actors, -// posts, -// threadgates, -// embeds, -// labels, -// lists, -// viewer, -// ) - -// // replies that are invalid due to reply-gating: -// // a. may appear as the anchor post, but without any parent or replies. -// // b. may not appear anywhere else in the thread. -// const isAnchorPost = state.threadData.post.uri === threadData.post.postUri -// const info = posts[threadData.post.postUri] -// // @TODO re-enable invalidReplyRoot check -// // const badReply = !!info?.invalidReplyRoot || !!info?.violatesThreadGate -// const badReply = !!info?.violatesThreadGate -// const omitBadReply = !isAnchorPost && badReply - -// if (!post || blocks[post.uri]?.reply || omitBadReply) { -// return { -// $type: 'app.bsky.feed.defs#notFoundPost', -// uri: threadData.post.postUri, -// notFound: true, -// } -// } - -// if (post.author.viewer?.blocking || post.author.viewer?.blockedBy) { -// return { -// $type: 'app.bsky.feed.defs#blockedPost', -// uri: threadData.post.postUri, -// blocked: true, -// author: { -// did: post.author.did, -// viewer: post.author.viewer -// ? 
{ -// blockedBy: post.author.viewer?.blockedBy, -// blocking: post.author.viewer?.blocking, -// } -// : undefined, -// }, -// } -// } - -// let parent -// if (threadData.parent && !badReply) { -// if (threadData.parent instanceof ParentNotFoundError) { -// parent = { -// $type: 'app.bsky.feed.defs#notFoundPost', -// uri: threadData.parent.uri, -// notFound: true, -// } -// } else { -// parent = composeThread(threadData.parent, actors, state, ctx, viewer) -// } -// } - -// let replies: (ThreadViewPost | NotFoundPost | BlockedPost)[] | undefined -// if (threadData.replies && !badReply) { -// replies = threadData.replies.flatMap((reply) => { -// const thread = composeThread(reply, actors, state, ctx, viewer) -// // e.g. don't bother including #postNotFound reply placeholders for takedowns. either way matches api contract. -// const skip = [] -// return isNotFoundPost(thread) ? skip : thread -// }) -// } - -// return { -// $type: 'app.bsky.feed.defs#threadViewPost', -// post, -// parent, -// replies, -// } -// } - -// const getRelevantIds = ( -// thread: PostThread, -// ): { dids: Set; uris: Set } => { -// const dids = new Set() -// const uris = new Set() -// if (thread.parent && !(thread.parent instanceof ParentNotFoundError)) { -// const fromParent = getRelevantIds(thread.parent) -// fromParent.dids.forEach((did) => dids.add(did)) -// fromParent.uris.forEach((uri) => uris.add(uri)) -// } -// if (thread.replies) { -// for (const reply of thread.replies) { -// const fromChild = getRelevantIds(reply) -// fromChild.dids.forEach((did) => dids.add(did)) -// fromChild.uris.forEach((uri) => uris.add(uri)) -// } -// } -// dids.add(thread.post.postAuthorDid) -// uris.add(thread.post.postUri) -// if (thread.post.replyRoot) { -// // ensure root is included for checking interactions -// uris.add(thread.post.replyRoot) -// dids.add(new AtUri(thread.post.replyRoot).hostname) -// } -// return { dids, uris } -// } - -// const getThreadData = async ( -// params: Params, -// ctx: 
Context, -// ): Promise => { -// const { db, feedService } = ctx -// const { uri, depth, parentHeight } = params - -// const [parents, children] = await Promise.all([ -// getAncestorsAndSelfQb(db.db, { uri, parentHeight }) -// .selectFrom('ancestor') -// .innerJoin( -// feedService.selectPostQb().as('post'), -// 'post.uri', -// 'ancestor.uri', -// ) -// .selectAll('post') -// .execute(), -// getDescendentsQb(db.db, { uri, depth }) -// .selectFrom('descendent') -// .innerJoin( -// feedService.selectPostQb().as('post'), -// 'post.uri', -// 'descendent.uri', -// ) -// .selectAll('post') -// .orderBy('sortAt', 'desc') -// .execute(), -// ]) -// // prevent self-referential loops -// const includedPosts = new Set([uri]) -// const parentsByUri = parents.reduce((acc, post) => { -// return Object.assign(acc, { [post.uri]: post }) -// }, {} as Record) -// const childrenByParentUri = children.reduce((acc, child) => { -// if (!child.replyParent) return acc -// if (includedPosts.has(child.uri)) return acc -// includedPosts.add(child.uri) -// acc[child.replyParent] ??= [] -// acc[child.replyParent].push(child) -// return acc -// }, {} as Record) -// const post = parentsByUri[uri] -// if (!post) return null -// return { -// post, -// parent: post.replyParent -// ? getParentData( -// parentsByUri, -// includedPosts, -// post.replyParent, -// parentHeight, -// ) -// : undefined, -// replies: getChildrenData(childrenByParentUri, uri, depth), -// } -// } - -// const getParentData = ( -// postsByUri: Record, -// includedPosts: Set, -// uri: string, -// depth: number, -// ): PostThread | ParentNotFoundError | undefined => { -// if (depth < 1) return undefined -// if (includedPosts.has(uri)) return undefined -// includedPosts.add(uri) -// const post = postsByUri[uri] -// if (!post) return new ParentNotFoundError(uri) -// return { -// post, -// parent: post.replyParent -// ? 
getParentData(postsByUri, includedPosts, post.replyParent, depth - 1) -// : undefined, -// replies: [], -// } -// } - -// const getChildrenData = ( -// childrenByParentUri: Record, -// uri: string, -// depth: number, -// ): PostThread[] | undefined => { -// if (depth === 0) return undefined -// const children = childrenByParentUri[uri] ?? [] -// return children.map((row) => ({ -// post: row, -// replies: getChildrenData(childrenByParentUri, row.postUri, depth - 1), -// })) -// } - -// class ParentNotFoundError extends Error { -// constructor(public uri: string) { -// super(`Parent not found: ${uri}`) -// } -// } - -// type PostThread = { -// post: FeedRow -// parent?: PostThread | ParentNotFoundError -// replies?: PostThread[] -// } - type Context = { dataplane: DataPlaneClient hydrator: Hydrator diff --git a/packages/bsky/src/api/app/bsky/graph/muteActor.ts b/packages/bsky/src/api/app/bsky/graph/muteActor.ts index 50a3723db6e..be205f9f9b0 100644 --- a/packages/bsky/src/api/app/bsky/graph/muteActor.ts +++ b/packages/bsky/src/api/app/bsky/graph/muteActor.ts @@ -1,4 +1,3 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -7,21 +6,8 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.authVerifier, handler: async ({ auth, input }) => { const { actor } = input.body - const requester = auth.credentials.did - const db = ctx.db.getPrimary() - - const subjectDid = await ctx.services.actor(db).getActorDid(actor) - if (!subjectDid) { - throw new InvalidRequestError(`Actor not found: ${actor}`) - } - if (subjectDid === requester) { - throw new InvalidRequestError('Cannot mute oneself') - } - - await ctx.services.graph(db).muteActor({ - subjectDid, - mutedByDid: requester, - }) + const viewer = auth.credentials.did + await ctx.dataplane.muteActor({ actorDid: viewer, subjectDid: actor }) }, }) } diff --git 
a/packages/bsky/src/api/app/bsky/graph/muteActorList.ts b/packages/bsky/src/api/app/bsky/graph/muteActorList.ts index b6b29796c5c..3b31510cf9a 100644 --- a/packages/bsky/src/api/app/bsky/graph/muteActorList.ts +++ b/packages/bsky/src/api/app/bsky/graph/muteActorList.ts @@ -1,28 +1,13 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' -import * as lex from '../../../../lexicon/lexicons' import AppContext from '../../../../context' -import { AtUri } from '@atproto/syntax' export default function (server: Server, ctx: AppContext) { server.app.bsky.graph.muteActorList({ auth: ctx.authVerifier, handler: async ({ auth, input }) => { const { list } = input.body - const requester = auth.credentials.did - - const db = ctx.db.getPrimary() - - const listUri = new AtUri(list) - const collId = lex.ids.AppBskyGraphList - if (listUri.collection !== collId) { - throw new InvalidRequestError(`Invalid collection: expected: ${collId}`) - } - - await ctx.services.graph(db).muteActorList({ - list, - mutedByDid: requester, - }) + const viewer = auth.credentials.did + await ctx.dataplane.muteActorList({ actorDid: viewer, listUri: list }) }, }) } diff --git a/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts b/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts index 11af919126f..80da8fd844d 100644 --- a/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts +++ b/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts @@ -1,4 +1,3 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -7,21 +6,8 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.authVerifier, handler: async ({ auth, input }) => { const { actor } = input.body - const requester = auth.credentials.did - const db = ctx.db.getPrimary() - - const subjectDid = await ctx.services.actor(db).getActorDid(actor) - if (!subjectDid) { - throw new 
InvalidRequestError(`Actor not found: ${actor}`) - } - if (subjectDid === requester) { - throw new InvalidRequestError('Cannot mute oneself') - } - - await ctx.services.graph(db).unmuteActor({ - subjectDid, - mutedByDid: requester, - }) + const viewer = auth.credentials.did + await ctx.dataplane.unmuteActor({ actorDid: viewer, subjectDid: actor }) }, }) } diff --git a/packages/bsky/src/api/app/bsky/graph/unmuteActorList.ts b/packages/bsky/src/api/app/bsky/graph/unmuteActorList.ts index 8b97530c216..e71adb8d5c6 100644 --- a/packages/bsky/src/api/app/bsky/graph/unmuteActorList.ts +++ b/packages/bsky/src/api/app/bsky/graph/unmuteActorList.ts @@ -6,13 +6,8 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.authVerifier, handler: async ({ auth, input }) => { const { list } = input.body - const requester = auth.credentials.did - const db = ctx.db.getPrimary() - - await ctx.services.graph(db).unmuteActorList({ - list, - mutedByDid: requester, - }) + const viewer = auth.credentials.did + await ctx.dataplane.unmuteActorList({ actorDid: viewer, listUri: list }) }, }) } diff --git a/packages/bsky/src/api/blob-resolver.ts b/packages/bsky/src/api/blob-resolver.ts index 7eb245eedd5..8f43fcd7f0d 100644 --- a/packages/bsky/src/api/blob-resolver.ts +++ b/packages/bsky/src/api/blob-resolver.ts @@ -5,12 +5,10 @@ import axios, { AxiosError } from 'axios' import { CID } from 'multiformats/cid' import { ensureValidDid } from '@atproto/syntax' import { forwardStreamErrors, VerifyCidTransform } from '@atproto/common' -import { IdResolver, DidNotFoundError } from '@atproto/identity' +import { DidNotFoundError } from '@atproto/identity' import AppContext from '../context' import { httpLogger as log } from '../logger' import { retryHttp } from '../util/retry' -import { Database } from '../db' -import { sql } from 'kysely' // Resolve and verify blob from its origin host @@ -32,8 +30,7 @@ export const createRouter = (ctx: AppContext): express.Router => { return 
next(createError(400, 'Invalid cid')) } - const db = ctx.db.getReplica() - const verifiedImage = await resolveBlob(did, cid, db, ctx.idResolver) + const verifiedImage = await resolveBlob(ctx, did, cid) // Send chunked response, destroying stream early (before // closing chunk) if the bytes don't match the expected cid. @@ -77,24 +74,14 @@ export const createRouter = (ctx: AppContext): express.Router => { return router } -export async function resolveBlob( - did: string, - cid: CID, - db: Database, - idResolver: IdResolver, -) { +export async function resolveBlob(ctx: AppContext, did: string, cid: CID) { const cidStr = cid.toString() - const [{ pds }, takedown] = await Promise.all([ - idResolver.did.resolveAtprotoData(did), // @TODO cache did info - db.db - .selectFrom('moderation_subject_status') - .select('id') - .where('blobCids', '@>', sql`CAST(${JSON.stringify([cidStr])} AS JSONB)`) - .where('takendown', 'is', true) - .executeTakeFirst(), + const [{ pds }, { takenDown }] = await Promise.all([ + ctx.idResolver.did.resolveAtprotoData(did), + ctx.dataplane.getBlobTakedown({ actorDid: did, cid: cid.toString() }), ]) - if (takedown) { + if (takenDown) { throw createError(404, 'Blob not found') } diff --git a/packages/bsky/src/api/com/atproto/identity/resolveHandle.ts b/packages/bsky/src/api/com/atproto/identity/resolveHandle.ts index 30c1d7f8a6f..6cb524c6ec2 100644 --- a/packages/bsky/src/api/com/atproto/identity/resolveHandle.ts +++ b/packages/bsky/src/api/com/atproto/identity/resolveHandle.ts @@ -7,12 +7,9 @@ export default function (server: Server, ctx: AppContext) { server.com.atproto.identity.resolveHandle(async ({ req, params }) => { const handle = ident.normalizeHandle(params.handle || req.hostname) - const db = ctx.db.getReplica() - let did: string | undefined - const user = await ctx.services.actor(db).getActor(handle, true) - if (user) { - did = user.did - } else { + let [did] = await ctx.hydrator.actor.getDids([handle]) + + if (!did) { const 
publicHostname = ctx.cfg.publicUrl ? new URL(ctx.cfg.publicUrl).hostname : null diff --git a/packages/bsky/src/api/com/atproto/repo/getRecord.ts b/packages/bsky/src/api/com/atproto/repo/getRecord.ts index c42c1fd6b4c..d492816d0e4 100644 --- a/packages/bsky/src/api/com/atproto/repo/getRecord.ts +++ b/packages/bsky/src/api/com/atproto/repo/getRecord.ts @@ -1,38 +1,55 @@ +import { CID } from 'multiformats/cid' import { InvalidRequestError } from '@atproto/xrpc-server' import { AtUri } from '@atproto/syntax' import { Server } from '../../../../lexicon' +import { ids } from '../../../../lexicon/lexicons' import AppContext from '../../../../context' -import { jsonStringToLex } from '@atproto/lexicon' export default function (server: Server, ctx: AppContext) { server.com.atproto.repo.getRecord(async ({ params }) => { const { repo, collection, rkey, cid } = params - const db = ctx.db.getReplica() - const did = await ctx.services.actor(db).getActorDid(repo) + const [did] = await ctx.hydrator.actor.getDids([repo]) if (!did) { throw new InvalidRequestError(`Could not find repo: ${repo}`) } - const uri = AtUri.make(did, collection, rkey) + const uri = AtUri.make(did, collection, rkey).toString() - let builder = db.db - .selectFrom('record') - .selectAll() - .where('uri', '=', uri.toString()) - if (cid) { - builder = builder.where('cid', '=', cid) + let result: { cid: CID; record: Record } | null | undefined + if (collection === ids.AppBskyFeedPost) { + result = (await ctx.hydrator.feed.getPosts([uri])).get(uri) + } else if (collection === ids.AppBskyFeedRepost) { + result = (await ctx.hydrator.feed.getReposts([uri])).get(uri) + } else if (collection === ids.AppBskyFeedLike) { + result = (await ctx.hydrator.feed.getLikes([uri])).get(uri) + } else if (collection === ids.AppBskyGraphFollow) { + result = (await ctx.hydrator.graph.getFollows([uri])).get(uri) + } else if (collection === ids.AppBskyGraphList) { + result = (await ctx.hydrator.graph.getLists([uri])).get(uri) + } else 
if (collection === ids.AppBskyGraphListitem) { + result = (await ctx.hydrator.graph.getListItems([uri])).get(uri) + } else if (collection === ids.AppBskyGraphBlock) { + result = (await ctx.hydrator.graph.getBlocks([uri])).get(uri) + } else if (collection === ids.AppBskyFeedGenerator) { + result = (await ctx.hydrator.feed.getFeedGens([uri])).get(uri) + } else if (collection === ids.AppBskyActorProfile) { + const actor = (await ctx.hydrator.actor.getActors([did])).get(did) + result = + actor?.profile && actor?.profileCid + ? { record: actor.profile, cid: actor.profileCid } + : undefined } - const record = await builder.executeTakeFirst() - if (!record) { + if (!result || (cid && result.cid.toString() !== cid)) { throw new InvalidRequestError(`Could not locate record: ${uri}`) } + return { - encoding: 'application/json', + encoding: 'application/json' as const, body: { - uri: record.uri, - cid: record.cid, - value: jsonStringToLex(record.json) as Record, + uri: uri, + cid: result.cid.toString(), + value: result.record, }, } }) diff --git a/packages/bsky/src/api/com/atproto/temp/fetchLabels.ts b/packages/bsky/src/api/com/atproto/temp/fetchLabels.ts index 8a6cacc2fbd..044a8d4dfd4 100644 --- a/packages/bsky/src/api/com/atproto/temp/fetchLabels.ts +++ b/packages/bsky/src/api/com/atproto/temp/fetchLabels.ts @@ -1,30 +1,9 @@ import { Server } from '../../../../lexicon' import AppContext from '../../../../context' +import { InvalidRequestError } from '@atproto/xrpc-server' -export default function (server: Server, ctx: AppContext) { - server.com.atproto.temp.fetchLabels(async ({ params }) => { - const { limit } = params - const db = ctx.db.getReplica() - const since = - params.since !== undefined ? new Date(params.since).toISOString() : '' - const labelRes = await db.db - .selectFrom('label') - .selectAll() - .orderBy('label.cts', 'asc') - .where('cts', '>', since) - .limit(limit) - .execute() - - const labels = labelRes.map((l) => ({ - ...l, - cid: l.cid === '' ? 
undefined : l.cid, - })) - - return { - encoding: 'application/json', - body: { - labels, - }, - } +export default function (server: Server, _ctx: AppContext) { + server.com.atproto.temp.fetchLabels(async (_reqCtx) => { + throw new InvalidRequestError('not implemented on dataplane') }) } diff --git a/packages/bsky/src/api/health.ts b/packages/bsky/src/api/health.ts index bdcdeefcb4b..fa4096de620 100644 --- a/packages/bsky/src/api/health.ts +++ b/packages/bsky/src/api/health.ts @@ -1,5 +1,4 @@ import express from 'express' -import { sql } from 'kysely' import AppContext from '../context' export const createRouter = (ctx: AppContext): express.Router => { @@ -7,9 +6,8 @@ export const createRouter = (ctx: AppContext): express.Router => { router.get('/xrpc/_health', async function (req, res) { const { version } = ctx.cfg - const db = ctx.db.getPrimary() try { - await sql`select 1`.execute(db.db) + await ctx.dataplane.ping({}) } catch (err) { req.log.error(err, 'failed health check') return res.status(503).send({ version, error: 'Service Unavailable' }) diff --git a/packages/bsky/src/api/index.ts b/packages/bsky/src/api/index.ts index da21b582019..ca2acaef1dd 100644 --- a/packages/bsky/src/api/index.ts +++ b/packages/bsky/src/api/index.ts @@ -40,16 +40,8 @@ import updateSeen from './app/bsky/notification/updateSeen' import registerPush from './app/bsky/notification/registerPush' import getPopularFeedGenerators from './app/bsky/unspecced/getPopularFeedGenerators' import getTimelineSkeleton from './app/bsky/unspecced/getTimelineSkeleton' -import createReport from './com/atproto/moderation/createReport' -import emitModerationEvent from './com/atproto/admin/emitModerationEvent' -import searchRepos from './com/atproto/admin/searchRepos' -import adminGetRecord from './com/atproto/admin/getRecord' -import getRepo from './com/atproto/admin/getRepo' -import queryModerationStatuses from './com/atproto/admin/queryModerationStatuses' import resolveHandle from 
'./com/atproto/identity/resolveHandle' import getRecord from './com/atproto/repo/getRecord' -import queryModerationEvents from './com/atproto/admin/queryModerationEvents' -import getModerationEvent from './com/atproto/admin/getModerationEvent' import fetchLabels from './com/atproto/temp/fetchLabels' export * as health from './health' @@ -101,14 +93,6 @@ export default function (server: Server, ctx: AppContext) { getPopularFeedGenerators(server, ctx) getTimelineSkeleton(server, ctx) // com.atproto - createReport(server, ctx) - emitModerationEvent(server, ctx) - searchRepos(server, ctx) - adminGetRecord(server, ctx) - getRepo(server, ctx) - getModerationEvent(server, ctx) - queryModerationEvents(server, ctx) - queryModerationStatuses(server, ctx) resolveHandle(server, ctx) getRecord(server, ctx) fetchLabels(server, ctx) diff --git a/packages/bsky/src/data-plane/gen/bsky_connect.ts b/packages/bsky/src/data-plane/gen/bsky_connect.ts index c61bdd2ad2e..ed35f836cd8 100644 --- a/packages/bsky/src/data-plane/gen/bsky_connect.ts +++ b/packages/bsky/src/data-plane/gen/bsky_connect.ts @@ -3,7 +3,7 @@ /* eslint-disable */ // @ts-nocheck -import { GetActorFeedsRequest, GetActorFeedsResponse, GetActorFollowsActorsRequest, GetActorFollowsActorsResponse, GetActorLikesRequest, GetActorLikesResponse, GetActorListsRequest, GetActorListsResponse, GetActorMutesActorRequest, GetActorMutesActorResponse, GetActorMutesActorViaListRequest, GetActorMutesActorViaListResponse, GetActorRepostsRequest, GetActorRepostsResponse, GetActorsRequest, GetActorsResponse, GetAuthorFeedRequest, GetAuthorFeedResponse, GetBidirectionalBlockRequest, GetBidirectionalBlockResponse, GetBidirectionalBlockViaListRequest, GetBidirectionalBlockViaListResponse, GetBlobTakedownRequest, GetBlobTakedownResponse, GetBlockExistenceRequest, GetBlockExistenceResponse, GetBlocklistSubscriptionRequest, GetBlocklistSubscriptionResponse, GetBlocklistSubscriptionsRequest, GetBlocklistSubscriptionsResponse, 
GetBlockRecordsRequest, GetBlockRecordsResponse, GetBlocksRequest, GetBlocksResponse, GetDidsByHandlesRequest, GetDidsByHandlesResponse, GetFeedGeneratorRecordsRequest, GetFeedGeneratorRecordsResponse, GetFeedGeneratorStatusRequest, GetFeedGeneratorStatusResponse, GetFollowCountsRequest, GetFollowCountsResponse, GetFollowerCountsRequest, GetFollowerCountsResponse, GetFollowersRequest, GetFollowersResponse, GetFollowRecordsRequest, GetFollowRecordsResponse, GetFollowsRequest, GetFollowsResponse, GetFollowSuggestionsRequest, GetFollowSuggestionsResponse, GetLabelsRequest, GetLabelsResponse, GetLatestRevRequest, GetLatestRevResponse, GetLikeCountsRequest, GetLikeCountsResponse, GetLikeRecordsRequest, GetLikeRecordsResponse, GetLikesByActorAndSubjectsRequest, GetLikesByActorAndSubjectsResponse, GetLikesBySubjectRequest, GetLikesBySubjectResponse, GetListBlockRecordsRequest, GetListBlockRecordsResponse, GetListCountRequest, GetListCountResponse, GetListFeedRequest, GetListFeedResponse, GetListItemRecordsRequest, GetListItemRecordsResponse, GetListMembershipRequest, GetListMembershipResponse, GetListMembersRequest, GetListMembersResponse, GetListRecordsRequest, GetListRecordsResponse, GetMutelistSubscriptionRequest, GetMutelistSubscriptionResponse, GetMutelistSubscriptionsRequest, GetMutelistSubscriptionsResponse, GetMutesRequest, GetMutesResponse, GetNotificationSeenRequest, GetNotificationSeenResponse, GetNotificationsRequest, GetNotificationsResponse, GetPostCountsRequest, GetPostCountsResponse, GetPostRecordsRequest, GetPostRecordsResponse, GetPostReplyCountsRequest, GetPostReplyCountsResponse, GetProfileRecordsRequest, GetProfileRecordsResponse, GetRelationshipsRequest, GetRelationshipsResponse, GetRepostCountsRequest, GetRepostCountsResponse, GetRepostRecordsRequest, GetRepostRecordsResponse, GetRepostsByActorAndSubjectsRequest, GetRepostsByActorAndSubjectsResponse, GetRepostsBySubjectRequest, GetRepostsBySubjectResponse, GetSuggestedFeedsRequest, 
GetSuggestedFeedsResponse, GetThreadGateRecordsRequest, GetThreadGateRecordsResponse, GetThreadRequest, GetThreadResponse, GetTimelineRequest, GetTimelineResponse, GetUnreadNotificationCountRequest, GetUnreadNotificationCountResponse, PingRequest, PingResponse, SearchActorsRequest, SearchActorsResponse, SearchPostsRequest, SearchPostsResponse, UpdateNotificationSeenRequest, UpdateNotificationSeenResponse, UpdateTakedownRequest, UpdateTakedownResponse } from "./bsky_pb.ts"; +import { GetActorFeedsRequest, GetActorFeedsResponse, GetActorFollowsActorsRequest, GetActorFollowsActorsResponse, GetActorLikesRequest, GetActorLikesResponse, GetActorListsRequest, GetActorListsResponse, GetActorMutesActorRequest, GetActorMutesActorResponse, GetActorMutesActorViaListRequest, GetActorMutesActorViaListResponse, GetActorRepostsRequest, GetActorRepostsResponse, GetActorsRequest, GetActorsResponse, GetAuthorFeedRequest, GetAuthorFeedResponse, GetBidirectionalBlockRequest, GetBidirectionalBlockResponse, GetBidirectionalBlockViaListRequest, GetBidirectionalBlockViaListResponse, GetBlobTakedownRequest, GetBlobTakedownResponse, GetBlockExistenceRequest, GetBlockExistenceResponse, GetBlocklistSubscriptionRequest, GetBlocklistSubscriptionResponse, GetBlocklistSubscriptionsRequest, GetBlocklistSubscriptionsResponse, GetBlockRecordsRequest, GetBlockRecordsResponse, GetBlocksRequest, GetBlocksResponse, GetDidsByHandlesRequest, GetDidsByHandlesResponse, GetFeedGeneratorRecordsRequest, GetFeedGeneratorRecordsResponse, GetFeedGeneratorStatusRequest, GetFeedGeneratorStatusResponse, GetFollowCountsRequest, GetFollowCountsResponse, GetFollowerCountsRequest, GetFollowerCountsResponse, GetFollowersRequest, GetFollowersResponse, GetFollowRecordsRequest, GetFollowRecordsResponse, GetFollowsRequest, GetFollowsResponse, GetFollowSuggestionsRequest, GetFollowSuggestionsResponse, GetLabelsRequest, GetLabelsResponse, GetLatestRevRequest, GetLatestRevResponse, GetLikeCountsRequest, GetLikeCountsResponse, 
GetLikeRecordsRequest, GetLikeRecordsResponse, GetLikesByActorAndSubjectsRequest, GetLikesByActorAndSubjectsResponse, GetLikesBySubjectRequest, GetLikesBySubjectResponse, GetListBlockRecordsRequest, GetListBlockRecordsResponse, GetListCountRequest, GetListCountResponse, GetListFeedRequest, GetListFeedResponse, GetListItemRecordsRequest, GetListItemRecordsResponse, GetListMembershipRequest, GetListMembershipResponse, GetListMembersRequest, GetListMembersResponse, GetListRecordsRequest, GetListRecordsResponse, GetMutelistSubscriptionRequest, GetMutelistSubscriptionResponse, GetMutelistSubscriptionsRequest, GetMutelistSubscriptionsResponse, GetMutesRequest, GetMutesResponse, GetNotificationSeenRequest, GetNotificationSeenResponse, GetNotificationsRequest, GetNotificationsResponse, GetPostCountsRequest, GetPostCountsResponse, GetPostRecordsRequest, GetPostRecordsResponse, GetPostReplyCountsRequest, GetPostReplyCountsResponse, GetProfileRecordsRequest, GetProfileRecordsResponse, GetRelationshipsRequest, GetRelationshipsResponse, GetRepostCountsRequest, GetRepostCountsResponse, GetRepostRecordsRequest, GetRepostRecordsResponse, GetRepostsByActorAndSubjectsRequest, GetRepostsByActorAndSubjectsResponse, GetRepostsBySubjectRequest, GetRepostsBySubjectResponse, GetSuggestedFeedsRequest, GetSuggestedFeedsResponse, GetThreadGateRecordsRequest, GetThreadGateRecordsResponse, GetThreadRequest, GetThreadResponse, GetTimelineRequest, GetTimelineResponse, GetUnreadNotificationCountRequest, GetUnreadNotificationCountResponse, MuteActorListRequest, MuteActorListResponse, MuteActorRequest, MuteActorResponse, PingRequest, PingResponse, SearchActorsRequest, SearchActorsResponse, SearchPostsRequest, SearchPostsResponse, UnmuteActorListRequest, UnmuteActorListResponse, UnmuteActorRequest, UnmuteActorResponse, UpdateNotificationSeenRequest, UpdateNotificationSeenResponse, UpdateTakedownRequest, UpdateTakedownResponse } from "./bsky_pb.ts"; import { MethodKind } from "@bufbuild/protobuf"; 
/** @@ -334,6 +334,24 @@ export const Service = { O: GetMutesResponse, kind: MethodKind.Unary, }, + /** + * @generated from rpc bsky.Service.MuteActor + */ + muteActor: { + name: "MuteActor", + I: MuteActorRequest, + O: MuteActorResponse, + kind: MethodKind.Unary, + }, + /** + * @generated from rpc bsky.Service.UnmuteActor + */ + unmuteActor: { + name: "UnmuteActor", + I: UnmuteActorRequest, + O: UnmuteActorResponse, + kind: MethodKind.Unary, + }, /** * Mutelists * @@ -363,6 +381,24 @@ export const Service = { O: GetMutelistSubscriptionsResponse, kind: MethodKind.Unary, }, + /** + * @generated from rpc bsky.Service.MuteActorList + */ + muteActorList: { + name: "MuteActorList", + I: MuteActorListRequest, + O: MuteActorListResponse, + kind: MethodKind.Unary, + }, + /** + * @generated from rpc bsky.Service.UnmuteActorList + */ + unmuteActorList: { + name: "UnmuteActorList", + I: UnmuteActorListRequest, + O: UnmuteActorListResponse, + kind: MethodKind.Unary, + }, /** * Blocks * diff --git a/packages/bsky/src/data-plane/gen/bsky_pb.ts b/packages/bsky/src/data-plane/gen/bsky_pb.ts index ae6aae74197..34c0250de82 100644 --- a/packages/bsky/src/data-plane/gen/bsky_pb.ts +++ b/packages/bsky/src/data-plane/gen/bsky_pb.ts @@ -3245,6 +3245,158 @@ export class GetMutesResponse extends Message { } } +/** + * mute another user + * + * @generated from message bsky.MuteActorRequest + */ +export class MuteActorRequest extends Message { + /** + * @generated from field: string actor_did = 1; + */ + actorDid = ""; + + /** + * @generated from field: string subject_did = 2; + */ + subjectDid = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.MuteActorRequest"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "actor_did", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "subject_did", kind: "scalar", 
T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): MuteActorRequest { + return new MuteActorRequest().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): MuteActorRequest { + return new MuteActorRequest().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): MuteActorRequest { + return new MuteActorRequest().fromJsonString(jsonString, options); + } + + static equals(a: MuteActorRequest | PlainMessage | undefined, b: MuteActorRequest | PlainMessage | undefined): boolean { + return proto3.util.equals(MuteActorRequest, a, b); + } +} + +/** + * @generated from message bsky.MuteActorResponse + */ +export class MuteActorResponse extends Message { + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.MuteActorResponse"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): MuteActorResponse { + return new MuteActorResponse().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): MuteActorResponse { + return new MuteActorResponse().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): MuteActorResponse { + return new MuteActorResponse().fromJsonString(jsonString, options); + } + + static equals(a: MuteActorResponse | PlainMessage | undefined, b: MuteActorResponse | PlainMessage | undefined): boolean { + return proto3.util.equals(MuteActorResponse, a, b); + } +} + +/** + * unmute another user + * + * @generated from message bsky.UnmuteActorRequest + */ +export class UnmuteActorRequest extends Message { + /** + * @generated from field: string actor_did = 1; + */ + actorDid = ""; + + /** + * @generated from field: string subject_did = 2; + */ + 
subjectDid = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.UnmuteActorRequest"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "actor_did", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "subject_did", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): UnmuteActorRequest { + return new UnmuteActorRequest().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): UnmuteActorRequest { + return new UnmuteActorRequest().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): UnmuteActorRequest { + return new UnmuteActorRequest().fromJsonString(jsonString, options); + } + + static equals(a: UnmuteActorRequest | PlainMessage | undefined, b: UnmuteActorRequest | PlainMessage | undefined): boolean { + return proto3.util.equals(UnmuteActorRequest, a, b); + } +} + +/** + * @generated from message bsky.UnmuteActorResponse + */ +export class UnmuteActorResponse extends Message { + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.UnmuteActorResponse"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): UnmuteActorResponse { + return new UnmuteActorResponse().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): UnmuteActorResponse { + return new UnmuteActorResponse().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): UnmuteActorResponse { + return new UnmuteActorResponse().fromJsonString(jsonString, options); + } + + static equals(a: 
UnmuteActorResponse | PlainMessage | undefined, b: UnmuteActorResponse | PlainMessage | undefined): boolean { + return proto3.util.equals(UnmuteActorResponse, a, b); + } +} + /** * - return list uri of *any* list through which user A has muted user B * - hydrating mute state onto profiles @@ -3507,6 +3659,158 @@ export class GetMutelistSubscriptionsResponse extends Message { + /** + * @generated from field: string actor_did = 1; + */ + actorDid = ""; + + /** + * @generated from field: string list_uri = 2; + */ + listUri = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.MuteActorListRequest"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "actor_did", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "list_uri", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): MuteActorListRequest { + return new MuteActorListRequest().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): MuteActorListRequest { + return new MuteActorListRequest().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): MuteActorListRequest { + return new MuteActorListRequest().fromJsonString(jsonString, options); + } + + static equals(a: MuteActorListRequest | PlainMessage | undefined, b: MuteActorListRequest | PlainMessage | undefined): boolean { + return proto3.util.equals(MuteActorListRequest, a, b); + } +} + +/** + * @generated from message bsky.MuteActorListResponse + */ +export class MuteActorListResponse extends Message { + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.MuteActorListResponse"; + static readonly fields: 
FieldList = proto3.util.newFieldList(() => [ + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): MuteActorListResponse { + return new MuteActorListResponse().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): MuteActorListResponse { + return new MuteActorListResponse().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): MuteActorListResponse { + return new MuteActorListResponse().fromJsonString(jsonString, options); + } + + static equals(a: MuteActorListResponse | PlainMessage | undefined, b: MuteActorListResponse | PlainMessage | undefined): boolean { + return proto3.util.equals(MuteActorListResponse, a, b); + } +} + +/** + * unmute a list + * + * @generated from message bsky.UnmuteActorListRequest + */ +export class UnmuteActorListRequest extends Message { + /** + * @generated from field: string actor_did = 1; + */ + actorDid = ""; + + /** + * @generated from field: string list_uri = 2; + */ + listUri = ""; + + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.UnmuteActorListRequest"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + { no: 1, name: "actor_did", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + { no: 2, name: "list_uri", kind: "scalar", T: 9 /* ScalarType.STRING */ }, + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): UnmuteActorListRequest { + return new UnmuteActorListRequest().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): UnmuteActorListRequest { + return new UnmuteActorListRequest().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): UnmuteActorListRequest { + return new UnmuteActorListRequest().fromJsonString(jsonString, options); + } + + static equals(a: UnmuteActorListRequest | 
PlainMessage | undefined, b: UnmuteActorListRequest | PlainMessage | undefined): boolean { + return proto3.util.equals(UnmuteActorListRequest, a, b); + } +} + +/** + * @generated from message bsky.UnmuteActorListResponse + */ +export class UnmuteActorListResponse extends Message { + constructor(data?: PartialMessage) { + super(); + proto3.util.initPartial(data, this); + } + + static readonly runtime: typeof proto3 = proto3; + static readonly typeName = "bsky.UnmuteActorListResponse"; + static readonly fields: FieldList = proto3.util.newFieldList(() => [ + ]); + + static fromBinary(bytes: Uint8Array, options?: Partial): UnmuteActorListResponse { + return new UnmuteActorListResponse().fromBinary(bytes, options); + } + + static fromJson(jsonValue: JsonValue, options?: Partial): UnmuteActorListResponse { + return new UnmuteActorListResponse().fromJson(jsonValue, options); + } + + static fromJsonString(jsonString: string, options?: Partial): UnmuteActorListResponse { + return new UnmuteActorListResponse().fromJsonString(jsonString, options); + } + + static equals(a: UnmuteActorListResponse | PlainMessage | undefined, b: UnmuteActorListResponse | PlainMessage | undefined): boolean { + return proto3.util.equals(UnmuteActorListResponse, a, b); + } +} + /** * - Return block uri if there is a block between users A & B (bidirectional) * - hydrating (& actioning) block state on profiles diff --git a/packages/bsky/src/data-plane/server/routes/mutes.ts b/packages/bsky/src/data-plane/server/routes/mutes.ts index a25042556b6..73d9154f28f 100644 --- a/packages/bsky/src/data-plane/server/routes/mutes.ts +++ b/packages/bsky/src/data-plane/server/routes/mutes.ts @@ -1,4 +1,7 @@ +import assert from 'assert' import { ServiceImpl } from '@connectrpc/connect' +import { AtUri } from '@atproto/syntax' +import { ids } from '../../../lexicon/lexicons' import { Service } from '../../gen/bsky_connect' import { Database } from '../../../db' import { @@ -106,4 +109,55 @@ export default (db: 
Database): Partial> => ({ cursor: keyset.packFromResult(lists), } }, + + async muteActor(req) { + const { actorDid, subjectDid } = req + assert(actorDid !== subjectDid, 'cannot mute yourself') + await db.db + .insertInto('mute') + .values({ + subjectDid, + mutedByDid: actorDid, + createdAt: new Date().toISOString(), + }) + .onConflict((oc) => oc.doNothing()) + .execute() + }, + + async unmuteActor(req) { + const { actorDid, subjectDid } = req + assert(actorDid !== subjectDid, 'cannot mute yourself') + await db.db + .deleteFrom('mute') + .where('subjectDid', '=', subjectDid) + .where('mutedByDid', '=', actorDid) + .execute() + }, + + async muteActorList(req) { + const { actorDid, listUri } = req + assert(isListUri(listUri), 'must mute a list') + await db.db + .insertInto('list_mute') + .values({ + listUri, + mutedByDid: actorDid, + createdAt: new Date().toISOString(), + }) + .onConflict((oc) => oc.doNothing()) + .execute() + }, + + async unmuteActorList(req) { + const { actorDid, listUri } = req + assert(isListUri(listUri), 'must mute a list') + await db.db + .deleteFrom('list_mute') + .where('listUri', '=', listUri) + .where('mutedByDid', '=', actorDid) + .execute() + }, }) + +const isListUri = (uri: string) => + new AtUri(uri).collection === ids.AppBskyGraphList diff --git a/packages/bsky/src/feed-gen/bsky-team.ts b/packages/bsky/src/feed-gen/bsky-team.ts deleted file mode 100644 index 37e1cc317e5..00000000000 --- a/packages/bsky/src/feed-gen/bsky-team.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { NotEmptyArray } from '@atproto/common' -import { QueryParams as SkeletonParams } from '../lexicon/types/app/bsky/feed/getFeedSkeleton' -import AppContext from '../context' -import { paginate } from '../db/pagination' -import { AlgoHandler, AlgoResponse } from './types' -import { FeedKeyset } from '../api/app/bsky/util/feed' - -const BSKY_TEAM: NotEmptyArray = [ - 'did:plc:z72i7hdynmk6r22z27h6tvur', // @bsky.app - 'did:plc:ewvi7nxzyoun6zhxrhs64oiz', // @atproto.com - 
'did:plc:eon2iu7v3x2ukgxkqaf7e5np', // @safety.bsky.app -] - -const handler: AlgoHandler = async ( - ctx: AppContext, - params: SkeletonParams, - _viewer: string | null, -): Promise => { - const { limit = 50, cursor } = params - const db = ctx.db.getReplica('feed') - const feedService = ctx.services.feed(db) - - const { ref } = db.db.dynamic - - const postsQb = feedService - .selectPostQb() - .where('post.creator', 'in', BSKY_TEAM) - - const keyset = new FeedKeyset(ref('sortAt'), ref('cid')) - - let feedQb = db.db.selectFrom(postsQb.as('feed_items')).selectAll() - feedQb = paginate(feedQb, { limit, cursor, keyset }) - - const feedItemsRes = await feedQb.execute() - const feedItems = feedItemsRes.map((item) => ({ - itemUri: item.uri, - postUri: item.postUri, - })) - return { - feedItems, - cursor: keyset.packFromResult(feedItemsRes), - } -} - -export default handler diff --git a/packages/bsky/src/feed-gen/hot-classic.ts b/packages/bsky/src/feed-gen/hot-classic.ts deleted file mode 100644 index 4ddc449e66e..00000000000 --- a/packages/bsky/src/feed-gen/hot-classic.ts +++ /dev/null @@ -1,59 +0,0 @@ -import AppContext from '../context' -import { NotEmptyArray } from '@atproto/common' -import { QueryParams as SkeletonParams } from '../lexicon/types/app/bsky/feed/getFeedSkeleton' -import { paginate } from '../db/pagination' -import { AlgoHandler, AlgoResponse } from './types' -import { FeedKeyset } from '../api/app/bsky/util/feed' -import { valuesList } from '../db/util' - -const NO_WHATS_HOT_LABELS: NotEmptyArray = ['!no-promote'] - -const handler: AlgoHandler = async ( - ctx: AppContext, - params: SkeletonParams, - _viewer: string | null, -): Promise => { - const { limit = 50, cursor } = params - const db = ctx.db.getReplica('feed') - const feedService = ctx.services.feed(db) - - const { ref } = db.db.dynamic - - const postsQb = feedService - .selectPostQb() - .leftJoin('post_agg', 'post_agg.uri', 'post.uri') - .leftJoin('post_embed_record', 'post_embed_record.postUri', 
'post.uri') - .where('post_agg.likeCount', '>=', 12) - .where('post.replyParent', 'is', null) - .whereNotExists((qb) => - qb - .selectFrom('label') - .selectAll() - .whereRef('val', 'in', valuesList(NO_WHATS_HOT_LABELS)) - .where('neg', '=', false) - .where((clause) => - clause - .whereRef('label.uri', '=', ref('post.creator')) - .orWhereRef('label.uri', '=', ref('post.uri')) - .orWhereRef('label.uri', '=', ref('post_embed_record.embedUri')), - ), - ) - - const keyset = new FeedKeyset(ref('sortAt'), ref('cid')) - - let feedQb = db.db.selectFrom(postsQb.as('feed_items')).selectAll() - feedQb = paginate(feedQb, { limit, cursor, keyset }) - - const feedItemsRes = await feedQb.execute() - const feedItems = feedItemsRes.map((item) => ({ - itemUri: item.uri, - postUri: item.postUri, - })) - - return { - feedItems, - cursor: keyset.packFromResult(feedItemsRes), - } -} - -export default handler diff --git a/packages/bsky/src/feed-gen/index.ts b/packages/bsky/src/feed-gen/index.ts index 5109d32416c..ede5c861459 100644 --- a/packages/bsky/src/feed-gen/index.ts +++ b/packages/bsky/src/feed-gen/index.ts @@ -1,17 +1,6 @@ -import { AtUri } from '@atproto/syntax' -import { ids } from '../lexicon/lexicons' -import bskyTeam from './bsky-team' -import hotClassic from './hot-classic' -import mutuals from './mutuals' import { MountedAlgos } from './types' -const feedgenUri = (did, name) => - AtUri.make(did, ids.AppBskyFeedGenerator, name).toString() - // These are custom algorithms that will be mounted directly onto an AppView // Feel free to remove, update to your own, or serve the following logic at a record that you control -export const makeAlgos = (did: string): MountedAlgos => ({ - [feedgenUri(did, 'bsky-team')]: bskyTeam, - [feedgenUri(did, 'hot-classic')]: hotClassic, - [feedgenUri(did, 'mutuals')]: mutuals, -}) +// @TODO currently empty during appview v2 build out +export const makeAlgos = (_did: string): MountedAlgos => ({}) diff --git a/packages/bsky/src/feed-gen/mutuals.ts 
b/packages/bsky/src/feed-gen/mutuals.ts deleted file mode 100644 index 24089818f8b..00000000000 --- a/packages/bsky/src/feed-gen/mutuals.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { QueryParams as SkeletonParams } from '../lexicon/types/app/bsky/feed/getFeedSkeleton' -import AppContext from '../context' -import { paginate } from '../db/pagination' -import { AlgoHandler, AlgoResponse } from './types' -import { FeedKeyset, getFeedDateThreshold } from '../api/app/bsky/util/feed' -import { AuthRequiredError } from '@atproto/xrpc-server' - -const handler: AlgoHandler = async ( - ctx: AppContext, - params: SkeletonParams, - viewer: string | null, -): Promise => { - if (!viewer) { - throw new AuthRequiredError('This feed requires being logged-in') - } - - const { limit = 50, cursor } = params - const db = ctx.db.getReplica('feed') - const feedService = ctx.services.feed(db) - const { ref } = db.db.dynamic - - const mutualsSubquery = db.db - .selectFrom('follow') - .where('follow.creator', '=', viewer) - .whereExists((qb) => - qb - .selectFrom('follow as follow_inner') - .whereRef('follow_inner.creator', '=', 'follow.subjectDid') - .where('follow_inner.subjectDid', '=', viewer) - .selectAll(), - ) - .select('follow.subjectDid') - - const keyset = new FeedKeyset(ref('feed_item.sortAt'), ref('feed_item.cid')) - const sortFrom = keyset.unpack(cursor)?.primary - - let feedQb = feedService - .selectFeedItemQb() - .where('feed_item.type', '=', 'post') // ensures originatorDid is post.creator - .where((qb) => - qb - .where('originatorDid', '=', viewer) - .orWhere('originatorDid', 'in', mutualsSubquery), - ) - .where('feed_item.sortAt', '>', getFeedDateThreshold(sortFrom)) - - feedQb = paginate(feedQb, { limit, cursor, keyset }) - - const feedItemsRes = await feedQb.execute() - const feedItems = feedItemsRes.map((item) => ({ - itemUri: item.uri, - postUri: item.postUri, - })) - - return { - feedItems, - cursor: keyset.packFromResult(feedItemsRes), - } -} - -export default handler 
diff --git a/packages/bsky/src/services/index.ts b/packages/bsky/src/services/index.ts index c3fe47e6eff..20bac6935dd 100644 --- a/packages/bsky/src/services/index.ts +++ b/packages/bsky/src/services/index.ts @@ -1,9 +1,8 @@ -import { Database, PrimaryDatabase } from '../db' +import { Database } from '../db' import { ImageUriBuilder } from '../image/uri' import { ActorService } from './actor' import { FeedService } from './feed' import { GraphService } from './graph' -import { ModerationService } from './moderation' import { LabelService } from './label' import { ImageInvalidator } from '../image/invalidator' import { LabelCache } from '../label-cache' @@ -13,12 +12,11 @@ export function createServices(resources: { imgInvalidator: ImageInvalidator labelCache: LabelCache }): Services { - const { imgUriBuilder, imgInvalidator, labelCache } = resources + const { imgUriBuilder, labelCache } = resources return { actor: ActorService.creator(imgUriBuilder, labelCache), feed: FeedService.creator(imgUriBuilder, labelCache), graph: GraphService.creator(imgUriBuilder), - moderation: ModerationService.creator(imgUriBuilder, imgInvalidator), label: LabelService.creator(labelCache), } } @@ -27,10 +25,7 @@ export type Services = { actor: FromDb feed: FromDb graph: FromDb - moderation: FromDbPrimary label: FromDb } type FromDb = (db: Database) => T - -type FromDbPrimary = (db: PrimaryDatabase) => T From 96f382838dbf01ff7b354271b64b369eddf6c732 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Wed, 20 Dec 2023 16:27:23 -0500 Subject: [PATCH 03/17] remove bsky services. db and indexing logic into mock dataplane. 
--- .../src/api/app/bsky/actor/searchActors.ts | 4 +- .../app/bsky/actor/searchActorsTypeahead.ts | 4 +- packages/bsky/src/api/app/bsky/util/feed.ts | 19 - packages/bsky/src/background.ts | 35 -- packages/bsky/src/context.ts | 58 +- .../{ => data-plane/server}/db/coordinator.ts | 2 +- .../server}/db/database-schema.ts | 0 .../bsky/src/{ => data-plane/server}/db/db.ts | 2 +- .../src/{ => data-plane/server}/db/index.ts | 0 .../db/migrations/20230309T045948368Z-init.ts | 0 .../20230408T152211201Z-notification-init.ts | 0 .../20230417T210628672Z-moderation-init.ts | 0 .../20230420T211446071Z-did-cache.ts | 0 .../20230427T194702079Z-notif-record-index.ts | 0 .../20230605T144730094Z-post-profile-aggs.ts | 0 ...20230607T211442112Z-feed-generator-init.ts | 0 ...20230608T155101190Z-algo-whats-hot-view.ts | 0 .../20230608T201813132Z-mute-lists.ts | 0 .../migrations/20230608T205147239Z-mutes.ts | 0 .../migrations/20230609T153623961Z-blocks.ts | 0 ...30609T232122649Z-actor-deletion-indexes.ts | 0 .../20230610T203555962Z-suggested-follows.ts | 0 .../20230611T215300060Z-actor-state.ts | 0 .../20230620T161134972Z-post-langs.ts | 0 .../20230627T212437895Z-optional-handle.ts | 0 ...230629T220835893Z-remove-post-hierarchy.ts | 0 ...30703T045536691Z-feed-and-label-indices.ts | 0 .../20230720T164800037Z-posts-cursor-idx.ts | 0 ...1Z-feed-item-delete-invite-for-user-idx.ts | 0 .../20230808T172902639Z-repo-rev.ts | 0 .../20230810T203349843Z-action-duration.ts | 0 ...0230817T195936007Z-native-notifications.ts | 0 .../20230830T205507322Z-suggested-feeds.ts | 0 .../20230904T211011773Z-block-lists.ts | 0 .../20230906T222220386Z-thread-gating.ts | 0 .../20230920T213858047Z-add-tags-to-post.ts | 0 ...230929T192920807Z-record-cursor-indexes.ts | 0 ...33377Z-create-moderation-subject-status.ts | 0 .../server}/db/migrations/index.ts | 0 .../server}/db/migrations/provider.ts | 0 .../{ => data-plane/server}/db/pagination.ts | 0 .../src/{ => data-plane/server}/db/primary.ts | 2 +- 
.../server}/db/tables/actor-block.ts | 0 .../server}/db/tables/actor-state.ts | 0 .../server}/db/tables/actor-sync.ts | 0 .../server}/db/tables/actor.ts | 0 .../{ => data-plane/server}/db/tables/algo.ts | 0 .../server}/db/tables/did-cache.ts | 0 .../server}/db/tables/duplicate-record.ts | 0 .../server}/db/tables/feed-generator.ts | 0 .../server}/db/tables/feed-item.ts | 0 .../server}/db/tables/follow.ts | 0 .../server}/db/tables/label.ts | 0 .../{ => data-plane/server}/db/tables/like.ts | 0 .../server}/db/tables/list-block.ts | 0 .../server}/db/tables/list-item.ts | 0 .../server}/db/tables/list-mute.ts | 0 .../{ => data-plane/server}/db/tables/list.ts | 0 .../server}/db/tables/moderation.ts | 0 .../{ => data-plane/server}/db/tables/mute.ts | 0 .../db/tables/notification-push-token.ts | 0 .../server}/db/tables/notification.ts | 0 .../server}/db/tables/post-agg.ts | 0 .../server}/db/tables/post-embed.ts | 0 .../{ => data-plane/server}/db/tables/post.ts | 0 .../server}/db/tables/profile-agg.ts | 0 .../server}/db/tables/profile.ts | 0 .../server}/db/tables/record.ts | 0 .../server}/db/tables/repost.ts | 0 .../server}/db/tables/subscription.ts | 0 .../server}/db/tables/suggested-feed.ts | 0 .../server}/db/tables/suggested-follow.ts | 0 .../server}/db/tables/thread-gate.ts | 0 .../server}/db/tables/view-param.ts | 0 .../src/{ => data-plane/server}/db/types.ts | 0 .../src/{ => data-plane/server}/db/util.ts | 0 .../src/{ => data-plane/server}/did-cache.ts | 2 +- .../server}/indexing/index.ts | 70 +-- .../server}/indexing/plugins/block.ts | 18 +- .../indexing/plugins/feed-generator.ts | 18 +- .../server}/indexing/plugins/follow.ts | 20 +- .../server}/indexing/plugins/like.ts | 20 +- .../server}/indexing/plugins/list-block.ts | 18 +- .../server}/indexing/plugins/list-item.ts | 20 +- .../server}/indexing/plugins/list.ts | 18 +- .../server}/indexing/plugins/post.ts | 48 +- .../server}/indexing/plugins/profile.ts | 18 +- .../server}/indexing/plugins/repost.ts | 20 +- 
.../server}/indexing/plugins/thread-gate.ts | 18 +- .../server}/indexing/processor.ts | 66 +-- .../src/data-plane/server/routes/blocks.ts | 4 +- .../src/data-plane/server/routes/feed-gens.ts | 4 +- .../src/data-plane/server/routes/feeds.ts | 4 +- .../src/data-plane/server/routes/follows.ts | 6 +- .../src/data-plane/server/routes/index.ts | 2 +- .../src/data-plane/server/routes/labels.ts | 4 +- .../src/data-plane/server/routes/likes.ts | 4 +- .../src/data-plane/server/routes/lists.ts | 8 +- .../data-plane/server/routes/moderation.ts | 4 +- .../src/data-plane/server/routes/mutes.ts | 8 +- .../src/data-plane/server/routes/notifs.ts | 10 +- .../src/data-plane/server/routes/posts.ts | 2 +- .../src/data-plane/server/routes/profile.ts | 2 +- .../src/data-plane/server/routes/records.ts | 2 +- .../data-plane/server/routes/relationships.ts | 8 +- .../src/data-plane/server/routes/reposts.ts | 6 +- .../src/data-plane/server/routes/search.ts | 13 +- .../data-plane/server/routes/suggestions.ts | 2 +- .../bsky/src/data-plane/server/routes/sync.ts | 2 +- .../src/data-plane/server/routes/threads.ts | 7 +- .../feed => data-plane/server}/util.ts | 124 ++-- packages/bsky/src/db/leader.ts | 63 -- packages/bsky/src/db/views.ts | 50 -- packages/bsky/src/index.ts | 109 +--- packages/bsky/src/label-cache.ts | 90 --- packages/bsky/src/notifications.ts | 382 ------------ packages/bsky/src/services/actor/index.ts | 186 ------ packages/bsky/src/services/actor/types.ts | 75 --- packages/bsky/src/services/actor/views.ts | 372 ------------ packages/bsky/src/services/feed/index.ts | 547 ------------------ packages/bsky/src/services/feed/types.ts | 107 ---- packages/bsky/src/services/feed/views.ts | 460 --------------- packages/bsky/src/services/graph/index.ts | 381 ------------ packages/bsky/src/services/graph/types.ts | 10 - packages/bsky/src/services/index.ts | 31 - packages/bsky/src/services/label/index.ts | 173 ------ .../bsky/src/services/util/notification.ts | 70 --- 
packages/bsky/src/services/util/post.ts | 65 --- packages/bsky/src/services/util/search.ts | 172 ------ packages/bsky/src/views/index.ts | 3 +- packages/bsky/src/views/util.ts | 47 ++ 131 files changed, 301 insertions(+), 3818 deletions(-) delete mode 100644 packages/bsky/src/api/app/bsky/util/feed.ts delete mode 100644 packages/bsky/src/background.ts rename packages/bsky/src/{ => data-plane/server}/db/coordinator.ts (98%) rename packages/bsky/src/{ => data-plane/server}/db/database-schema.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/db.ts (98%) rename packages/bsky/src/{ => data-plane/server}/db/index.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230309T045948368Z-init.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230408T152211201Z-notification-init.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230417T210628672Z-moderation-init.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230420T211446071Z-did-cache.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230427T194702079Z-notif-record-index.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230605T144730094Z-post-profile-aggs.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230607T211442112Z-feed-generator-init.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230608T155101190Z-algo-whats-hot-view.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230608T201813132Z-mute-lists.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230608T205147239Z-mutes.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230609T153623961Z-blocks.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230609T232122649Z-actor-deletion-indexes.ts (100%) rename packages/bsky/src/{ => 
data-plane/server}/db/migrations/20230610T203555962Z-suggested-follows.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230611T215300060Z-actor-state.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230620T161134972Z-post-langs.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230627T212437895Z-optional-handle.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230629T220835893Z-remove-post-hierarchy.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230703T045536691Z-feed-and-label-indices.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230720T164800037Z-posts-cursor-idx.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230808T172902639Z-repo-rev.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230810T203349843Z-action-duration.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230817T195936007Z-native-notifications.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230830T205507322Z-suggested-feeds.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230904T211011773Z-block-lists.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230906T222220386Z-thread-gating.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230920T213858047Z-add-tags-to-post.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20230929T192920807Z-record-cursor-indexes.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/20231003T202833377Z-create-moderation-subject-status.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/migrations/index.ts (100%) rename packages/bsky/src/{ => 
data-plane/server}/db/migrations/provider.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/pagination.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/primary.ts (98%) rename packages/bsky/src/{ => data-plane/server}/db/tables/actor-block.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/actor-state.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/actor-sync.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/actor.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/algo.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/did-cache.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/duplicate-record.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/feed-generator.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/feed-item.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/follow.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/label.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/like.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/list-block.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/list-item.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/list-mute.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/list.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/moderation.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/mute.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/notification-push-token.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/notification.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/post-agg.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/post-embed.ts (100%) rename packages/bsky/src/{ => 
data-plane/server}/db/tables/post.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/profile-agg.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/profile.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/record.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/repost.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/subscription.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/suggested-feed.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/suggested-follow.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/thread-gate.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/tables/view-param.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/types.ts (100%) rename packages/bsky/src/{ => data-plane/server}/db/util.ts (100%) rename packages/bsky/src/{ => data-plane/server}/did-cache.ts (98%) rename packages/bsky/src/{services => data-plane/server}/indexing/index.ts (85%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/block.ts (75%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/feed-generator.ts (75%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/follow.ts (83%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/like.ts (81%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/list-block.ts (75%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/list-item.ts (78%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/list.ts (74%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/post.ts (89%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/profile.ts (73%) rename packages/bsky/src/{services => data-plane/server}/indexing/plugins/repost.ts (84%) rename packages/bsky/src/{services => 
data-plane/server}/indexing/plugins/thread-gate.ts (77%) rename packages/bsky/src/{services => data-plane/server}/indexing/processor.ts (78%) rename packages/bsky/src/{services/feed => data-plane/server}/util.ts (55%) delete mode 100644 packages/bsky/src/db/leader.ts delete mode 100644 packages/bsky/src/db/views.ts delete mode 100644 packages/bsky/src/label-cache.ts delete mode 100644 packages/bsky/src/notifications.ts delete mode 100644 packages/bsky/src/services/actor/index.ts delete mode 100644 packages/bsky/src/services/actor/types.ts delete mode 100644 packages/bsky/src/services/actor/views.ts delete mode 100644 packages/bsky/src/services/feed/index.ts delete mode 100644 packages/bsky/src/services/feed/types.ts delete mode 100644 packages/bsky/src/services/feed/views.ts delete mode 100644 packages/bsky/src/services/graph/index.ts delete mode 100644 packages/bsky/src/services/graph/types.ts delete mode 100644 packages/bsky/src/services/index.ts delete mode 100644 packages/bsky/src/services/label/index.ts delete mode 100644 packages/bsky/src/services/util/notification.ts delete mode 100644 packages/bsky/src/services/util/post.ts delete mode 100644 packages/bsky/src/services/util/search.ts diff --git a/packages/bsky/src/api/app/bsky/actor/searchActors.ts b/packages/bsky/src/api/app/bsky/actor/searchActors.ts index 5a207d02667..eb52c3a7bf5 100644 --- a/packages/bsky/src/api/app/bsky/actor/searchActors.ts +++ b/packages/bsky/src/api/app/bsky/actor/searchActors.ts @@ -13,7 +13,6 @@ import { Hydrator } from '../../../../hydration/hydrator' import { Views } from '../../../../views' import { DataPlaneClient } from '../../../../data-plane' import { parseString } from '../../../../hydration/util' -import { cleanQuery } from '../../../../services/util/search' export default function (server: Server, ctx: AppContext) { const searchActors = createPipeline( @@ -37,8 +36,7 @@ export default function (server: Server, ctx: AppContext) { const skeleton = async (inputs: 
SkeletonFnInput) => { const { ctx, params } = inputs - const rawQuery = params.q ?? params.term - const term = cleanQuery(rawQuery || '') + const term = params.q ?? params.term // @TODO // add hits total diff --git a/packages/bsky/src/api/app/bsky/actor/searchActorsTypeahead.ts b/packages/bsky/src/api/app/bsky/actor/searchActorsTypeahead.ts index 9ed622ecf55..7e8e572d86b 100644 --- a/packages/bsky/src/api/app/bsky/actor/searchActorsTypeahead.ts +++ b/packages/bsky/src/api/app/bsky/actor/searchActorsTypeahead.ts @@ -13,7 +13,6 @@ import { Hydrator } from '../../../../hydration/hydrator' import { Views } from '../../../../views' import { DataPlaneClient } from '../../../../data-plane' import { parseString } from '../../../../hydration/util' -import { cleanQuery } from '../../../../services/util/search' export default function (server: Server, ctx: AppContext) { const searchActorsTypeahead = createPipeline( @@ -37,8 +36,7 @@ export default function (server: Server, ctx: AppContext) { const skeleton = async (inputs: SkeletonFnInput) => { const { ctx, params } = inputs - const rawQuery = params.q ?? params.term - const term = cleanQuery(rawQuery || '') + const term = params.q ?? 
params.term // @TODO // add typeahead option diff --git a/packages/bsky/src/api/app/bsky/util/feed.ts b/packages/bsky/src/api/app/bsky/util/feed.ts deleted file mode 100644 index 769b2d7e833..00000000000 --- a/packages/bsky/src/api/app/bsky/util/feed.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { TimeCidKeyset } from '../../../../db/pagination' -import { FeedRow } from '../../../../services/feed/types' - -export enum FeedAlgorithm { - ReverseChronological = 'reverse-chronological', -} - -export class FeedKeyset extends TimeCidKeyset { - labelResult(result: FeedRow) { - return { primary: result.sortAt, secondary: result.cid } - } -} - -// For users with sparse feeds, avoid scanning more than one week for a single page -export const getFeedDateThreshold = (from: string | undefined, days = 1) => { - const timelineDateThreshold = from ? new Date(from) : new Date() - timelineDateThreshold.setDate(timelineDateThreshold.getDate() - days) - return timelineDateThreshold.toISOString() -} diff --git a/packages/bsky/src/background.ts b/packages/bsky/src/background.ts deleted file mode 100644 index 466bad80a51..00000000000 --- a/packages/bsky/src/background.ts +++ /dev/null @@ -1,35 +0,0 @@ -import PQueue from 'p-queue' -import { PrimaryDatabase } from './db' -import { dbLogger } from './logger' - -// A simple queue for in-process, out-of-band/backgrounded work - -export class BackgroundQueue { - queue = new PQueue({ concurrency: 20 }) - destroyed = false - constructor(public db: PrimaryDatabase) {} - - add(task: Task) { - if (this.destroyed) { - return - } - this.queue - .add(() => task(this.db)) - .catch((err) => { - dbLogger.error(err, 'background queue task failed') - }) - } - - async processAll() { - await this.queue.onIdle() - } - - // On destroy we stop accepting new tasks, but complete all pending/in-progress tasks. - // The application calls this only once http connections have drained (tasks no longer being added). 
- async destroy() { - this.destroyed = true - await this.queue.onIdle() - } -} - -type Task = (db: PrimaryDatabase) => Promise diff --git a/packages/bsky/src/context.ts b/packages/bsky/src/context.ts index 9827badf644..ff154999cd4 100644 --- a/packages/bsky/src/context.ts +++ b/packages/bsky/src/context.ts @@ -3,16 +3,9 @@ import { IdResolver } from '@atproto/identity' import { AtpAgent } from '@atproto/api' import { Keypair } from '@atproto/crypto' import { createServiceJwt } from '@atproto/xrpc-server' -import { DatabaseCoordinator } from './db' import { ServerConfig } from './config' -import { ImageUriBuilder } from './image/uri' -import { Services } from './services' import * as auth from './auth' -import DidSqlCache from './did-cache' -import { BackgroundQueue } from './background' import { MountedAlgos } from './feed-gen/types' -import { LabelCache } from './label-cache' -import { NotificationServer } from './notifications' import { DataPlaneClient } from './data-plane/client' import { Hydrator } from './hydration/hydrator' import { Views } from './views' @@ -21,49 +14,20 @@ export class AppContext { public moderationPushAgent: AtpAgent | undefined constructor( private opts: { - db: DatabaseCoordinator - imgUriBuilder: ImageUriBuilder cfg: ServerConfig - services: Services dataplane: DataPlaneClient hydrator: Hydrator views: Views signingKey: Keypair idResolver: IdResolver - didCache: DidSqlCache - labelCache: LabelCache - backgroundQueue: BackgroundQueue - searchAgent?: AtpAgent algos: MountedAlgos - notifServer: NotificationServer }, - ) { - if (opts.cfg.moderationPushUrl) { - const url = new URL(opts.cfg.moderationPushUrl) - this.moderationPushAgent = new AtpAgent({ service: url.origin }) - this.moderationPushAgent.api.setHeader( - 'authorization', - auth.buildBasicAuth(url.username, url.password), - ) - } - } - - get db(): DatabaseCoordinator { - return this.opts.db - } - - get imgUriBuilder(): ImageUriBuilder { - return this.opts.imgUriBuilder - } + ) {} 
get cfg(): ServerConfig { return this.opts.cfg } - get services(): Services { - return this.opts.services - } - get dataplane(): DataPlaneClient { return this.opts.dataplane } @@ -88,22 +52,6 @@ export class AppContext { return this.opts.idResolver } - get didCache(): DidSqlCache { - return this.opts.didCache - } - - get labelCache(): LabelCache { - return this.opts.labelCache - } - - get notifServer(): NotificationServer { - return this.opts.notifServer - } - - get searchAgent(): AtpAgent | undefined { - return this.opts.searchAgent - } - get authVerifier() { return auth.authVerifier(this.idResolver, { aud: this.cfg.serverDid }) } @@ -139,10 +87,6 @@ export class AppContext { }) } - get backgroundQueue(): BackgroundQueue { - return this.opts.backgroundQueue - } - get algos(): MountedAlgos { return this.opts.algos } diff --git a/packages/bsky/src/db/coordinator.ts b/packages/bsky/src/data-plane/server/db/coordinator.ts similarity index 98% rename from packages/bsky/src/db/coordinator.ts rename to packages/bsky/src/data-plane/server/db/coordinator.ts index a8f4cc3016c..25ef305ed5a 100644 --- a/packages/bsky/src/db/coordinator.ts +++ b/packages/bsky/src/data-plane/server/db/coordinator.ts @@ -2,7 +2,7 @@ import { Migrator } from 'kysely' import PrimaryDatabase from './primary' import Database from './db' import { PgOptions } from './types' -import { dbLogger } from '../logger' +import { dbLogger } from '../../../logger' type ReplicaTag = 'timeline' | 'feed' | 'search' | 'thread' | '*' type ReplicaOptions = PgOptions & { tags?: ReplicaTag[] } diff --git a/packages/bsky/src/db/database-schema.ts b/packages/bsky/src/data-plane/server/db/database-schema.ts similarity index 100% rename from packages/bsky/src/db/database-schema.ts rename to packages/bsky/src/data-plane/server/db/database-schema.ts diff --git a/packages/bsky/src/db/db.ts b/packages/bsky/src/data-plane/server/db/db.ts similarity index 98% rename from packages/bsky/src/db/db.ts rename to 
packages/bsky/src/data-plane/server/db/db.ts index cb58eb4742b..35fa6d0857a 100644 --- a/packages/bsky/src/db/db.ts +++ b/packages/bsky/src/data-plane/server/db/db.ts @@ -3,7 +3,7 @@ import { Kysely, PostgresDialect } from 'kysely' import { Pool as PgPool, types as pgTypes } from 'pg' import DatabaseSchema, { DatabaseSchemaType } from './database-schema' import { PgOptions } from './types' -import { dbLogger } from '../logger' +import { dbLogger } from '../../../logger' export class Database { pool: PgPool diff --git a/packages/bsky/src/db/index.ts b/packages/bsky/src/data-plane/server/db/index.ts similarity index 100% rename from packages/bsky/src/db/index.ts rename to packages/bsky/src/data-plane/server/db/index.ts diff --git a/packages/bsky/src/db/migrations/20230309T045948368Z-init.ts b/packages/bsky/src/data-plane/server/db/migrations/20230309T045948368Z-init.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230309T045948368Z-init.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230309T045948368Z-init.ts diff --git a/packages/bsky/src/db/migrations/20230408T152211201Z-notification-init.ts b/packages/bsky/src/data-plane/server/db/migrations/20230408T152211201Z-notification-init.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230408T152211201Z-notification-init.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230408T152211201Z-notification-init.ts diff --git a/packages/bsky/src/db/migrations/20230417T210628672Z-moderation-init.ts b/packages/bsky/src/data-plane/server/db/migrations/20230417T210628672Z-moderation-init.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230417T210628672Z-moderation-init.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230417T210628672Z-moderation-init.ts diff --git a/packages/bsky/src/db/migrations/20230420T211446071Z-did-cache.ts b/packages/bsky/src/data-plane/server/db/migrations/20230420T211446071Z-did-cache.ts 
similarity index 100% rename from packages/bsky/src/db/migrations/20230420T211446071Z-did-cache.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230420T211446071Z-did-cache.ts diff --git a/packages/bsky/src/db/migrations/20230427T194702079Z-notif-record-index.ts b/packages/bsky/src/data-plane/server/db/migrations/20230427T194702079Z-notif-record-index.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230427T194702079Z-notif-record-index.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230427T194702079Z-notif-record-index.ts diff --git a/packages/bsky/src/db/migrations/20230605T144730094Z-post-profile-aggs.ts b/packages/bsky/src/data-plane/server/db/migrations/20230605T144730094Z-post-profile-aggs.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230605T144730094Z-post-profile-aggs.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230605T144730094Z-post-profile-aggs.ts diff --git a/packages/bsky/src/db/migrations/20230607T211442112Z-feed-generator-init.ts b/packages/bsky/src/data-plane/server/db/migrations/20230607T211442112Z-feed-generator-init.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230607T211442112Z-feed-generator-init.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230607T211442112Z-feed-generator-init.ts diff --git a/packages/bsky/src/db/migrations/20230608T155101190Z-algo-whats-hot-view.ts b/packages/bsky/src/data-plane/server/db/migrations/20230608T155101190Z-algo-whats-hot-view.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230608T155101190Z-algo-whats-hot-view.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230608T155101190Z-algo-whats-hot-view.ts diff --git a/packages/bsky/src/db/migrations/20230608T201813132Z-mute-lists.ts b/packages/bsky/src/data-plane/server/db/migrations/20230608T201813132Z-mute-lists.ts similarity index 100% rename from 
packages/bsky/src/db/migrations/20230608T201813132Z-mute-lists.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230608T201813132Z-mute-lists.ts diff --git a/packages/bsky/src/db/migrations/20230608T205147239Z-mutes.ts b/packages/bsky/src/data-plane/server/db/migrations/20230608T205147239Z-mutes.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230608T205147239Z-mutes.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230608T205147239Z-mutes.ts diff --git a/packages/bsky/src/db/migrations/20230609T153623961Z-blocks.ts b/packages/bsky/src/data-plane/server/db/migrations/20230609T153623961Z-blocks.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230609T153623961Z-blocks.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230609T153623961Z-blocks.ts diff --git a/packages/bsky/src/db/migrations/20230609T232122649Z-actor-deletion-indexes.ts b/packages/bsky/src/data-plane/server/db/migrations/20230609T232122649Z-actor-deletion-indexes.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230609T232122649Z-actor-deletion-indexes.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230609T232122649Z-actor-deletion-indexes.ts diff --git a/packages/bsky/src/db/migrations/20230610T203555962Z-suggested-follows.ts b/packages/bsky/src/data-plane/server/db/migrations/20230610T203555962Z-suggested-follows.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230610T203555962Z-suggested-follows.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230610T203555962Z-suggested-follows.ts diff --git a/packages/bsky/src/db/migrations/20230611T215300060Z-actor-state.ts b/packages/bsky/src/data-plane/server/db/migrations/20230611T215300060Z-actor-state.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230611T215300060Z-actor-state.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230611T215300060Z-actor-state.ts 
diff --git a/packages/bsky/src/db/migrations/20230620T161134972Z-post-langs.ts b/packages/bsky/src/data-plane/server/db/migrations/20230620T161134972Z-post-langs.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230620T161134972Z-post-langs.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230620T161134972Z-post-langs.ts diff --git a/packages/bsky/src/db/migrations/20230627T212437895Z-optional-handle.ts b/packages/bsky/src/data-plane/server/db/migrations/20230627T212437895Z-optional-handle.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230627T212437895Z-optional-handle.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230627T212437895Z-optional-handle.ts diff --git a/packages/bsky/src/db/migrations/20230629T220835893Z-remove-post-hierarchy.ts b/packages/bsky/src/data-plane/server/db/migrations/20230629T220835893Z-remove-post-hierarchy.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230629T220835893Z-remove-post-hierarchy.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230629T220835893Z-remove-post-hierarchy.ts diff --git a/packages/bsky/src/db/migrations/20230703T045536691Z-feed-and-label-indices.ts b/packages/bsky/src/data-plane/server/db/migrations/20230703T045536691Z-feed-and-label-indices.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230703T045536691Z-feed-and-label-indices.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230703T045536691Z-feed-and-label-indices.ts diff --git a/packages/bsky/src/db/migrations/20230720T164800037Z-posts-cursor-idx.ts b/packages/bsky/src/data-plane/server/db/migrations/20230720T164800037Z-posts-cursor-idx.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230720T164800037Z-posts-cursor-idx.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230720T164800037Z-posts-cursor-idx.ts diff --git 
a/packages/bsky/src/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts b/packages/bsky/src/data-plane/server/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts diff --git a/packages/bsky/src/db/migrations/20230808T172902639Z-repo-rev.ts b/packages/bsky/src/data-plane/server/db/migrations/20230808T172902639Z-repo-rev.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230808T172902639Z-repo-rev.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230808T172902639Z-repo-rev.ts diff --git a/packages/bsky/src/db/migrations/20230810T203349843Z-action-duration.ts b/packages/bsky/src/data-plane/server/db/migrations/20230810T203349843Z-action-duration.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230810T203349843Z-action-duration.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230810T203349843Z-action-duration.ts diff --git a/packages/bsky/src/db/migrations/20230817T195936007Z-native-notifications.ts b/packages/bsky/src/data-plane/server/db/migrations/20230817T195936007Z-native-notifications.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230817T195936007Z-native-notifications.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230817T195936007Z-native-notifications.ts diff --git a/packages/bsky/src/db/migrations/20230830T205507322Z-suggested-feeds.ts b/packages/bsky/src/data-plane/server/db/migrations/20230830T205507322Z-suggested-feeds.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230830T205507322Z-suggested-feeds.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230830T205507322Z-suggested-feeds.ts diff --git 
a/packages/bsky/src/db/migrations/20230904T211011773Z-block-lists.ts b/packages/bsky/src/data-plane/server/db/migrations/20230904T211011773Z-block-lists.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230904T211011773Z-block-lists.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230904T211011773Z-block-lists.ts diff --git a/packages/bsky/src/db/migrations/20230906T222220386Z-thread-gating.ts b/packages/bsky/src/data-plane/server/db/migrations/20230906T222220386Z-thread-gating.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230906T222220386Z-thread-gating.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230906T222220386Z-thread-gating.ts diff --git a/packages/bsky/src/db/migrations/20230920T213858047Z-add-tags-to-post.ts b/packages/bsky/src/data-plane/server/db/migrations/20230920T213858047Z-add-tags-to-post.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230920T213858047Z-add-tags-to-post.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230920T213858047Z-add-tags-to-post.ts diff --git a/packages/bsky/src/db/migrations/20230929T192920807Z-record-cursor-indexes.ts b/packages/bsky/src/data-plane/server/db/migrations/20230929T192920807Z-record-cursor-indexes.ts similarity index 100% rename from packages/bsky/src/db/migrations/20230929T192920807Z-record-cursor-indexes.ts rename to packages/bsky/src/data-plane/server/db/migrations/20230929T192920807Z-record-cursor-indexes.ts diff --git a/packages/bsky/src/db/migrations/20231003T202833377Z-create-moderation-subject-status.ts b/packages/bsky/src/data-plane/server/db/migrations/20231003T202833377Z-create-moderation-subject-status.ts similarity index 100% rename from packages/bsky/src/db/migrations/20231003T202833377Z-create-moderation-subject-status.ts rename to packages/bsky/src/data-plane/server/db/migrations/20231003T202833377Z-create-moderation-subject-status.ts diff --git 
a/packages/bsky/src/db/migrations/index.ts b/packages/bsky/src/data-plane/server/db/migrations/index.ts similarity index 100% rename from packages/bsky/src/db/migrations/index.ts rename to packages/bsky/src/data-plane/server/db/migrations/index.ts diff --git a/packages/bsky/src/db/migrations/provider.ts b/packages/bsky/src/data-plane/server/db/migrations/provider.ts similarity index 100% rename from packages/bsky/src/db/migrations/provider.ts rename to packages/bsky/src/data-plane/server/db/migrations/provider.ts diff --git a/packages/bsky/src/db/pagination.ts b/packages/bsky/src/data-plane/server/db/pagination.ts similarity index 100% rename from packages/bsky/src/db/pagination.ts rename to packages/bsky/src/data-plane/server/db/pagination.ts diff --git a/packages/bsky/src/db/primary.ts b/packages/bsky/src/data-plane/server/db/primary.ts similarity index 98% rename from packages/bsky/src/db/primary.ts rename to packages/bsky/src/data-plane/server/db/primary.ts index e6e69872fd5..61b5765967a 100644 --- a/packages/bsky/src/db/primary.ts +++ b/packages/bsky/src/data-plane/server/db/primary.ts @@ -15,7 +15,7 @@ import { wait } from '@atproto/common' import DatabaseSchema from './database-schema' import * as migrations from './migrations' import { CtxMigrationProvider } from './migrations/provider' -import { dbLogger as log } from '../logger' +import { dbLogger as log } from '../../../logger' import { PgOptions } from './types' import { Database } from './db' diff --git a/packages/bsky/src/db/tables/actor-block.ts b/packages/bsky/src/data-plane/server/db/tables/actor-block.ts similarity index 100% rename from packages/bsky/src/db/tables/actor-block.ts rename to packages/bsky/src/data-plane/server/db/tables/actor-block.ts diff --git a/packages/bsky/src/db/tables/actor-state.ts b/packages/bsky/src/data-plane/server/db/tables/actor-state.ts similarity index 100% rename from packages/bsky/src/db/tables/actor-state.ts rename to 
packages/bsky/src/data-plane/server/db/tables/actor-state.ts diff --git a/packages/bsky/src/db/tables/actor-sync.ts b/packages/bsky/src/data-plane/server/db/tables/actor-sync.ts similarity index 100% rename from packages/bsky/src/db/tables/actor-sync.ts rename to packages/bsky/src/data-plane/server/db/tables/actor-sync.ts diff --git a/packages/bsky/src/db/tables/actor.ts b/packages/bsky/src/data-plane/server/db/tables/actor.ts similarity index 100% rename from packages/bsky/src/db/tables/actor.ts rename to packages/bsky/src/data-plane/server/db/tables/actor.ts diff --git a/packages/bsky/src/db/tables/algo.ts b/packages/bsky/src/data-plane/server/db/tables/algo.ts similarity index 100% rename from packages/bsky/src/db/tables/algo.ts rename to packages/bsky/src/data-plane/server/db/tables/algo.ts diff --git a/packages/bsky/src/db/tables/did-cache.ts b/packages/bsky/src/data-plane/server/db/tables/did-cache.ts similarity index 100% rename from packages/bsky/src/db/tables/did-cache.ts rename to packages/bsky/src/data-plane/server/db/tables/did-cache.ts diff --git a/packages/bsky/src/db/tables/duplicate-record.ts b/packages/bsky/src/data-plane/server/db/tables/duplicate-record.ts similarity index 100% rename from packages/bsky/src/db/tables/duplicate-record.ts rename to packages/bsky/src/data-plane/server/db/tables/duplicate-record.ts diff --git a/packages/bsky/src/db/tables/feed-generator.ts b/packages/bsky/src/data-plane/server/db/tables/feed-generator.ts similarity index 100% rename from packages/bsky/src/db/tables/feed-generator.ts rename to packages/bsky/src/data-plane/server/db/tables/feed-generator.ts diff --git a/packages/bsky/src/db/tables/feed-item.ts b/packages/bsky/src/data-plane/server/db/tables/feed-item.ts similarity index 100% rename from packages/bsky/src/db/tables/feed-item.ts rename to packages/bsky/src/data-plane/server/db/tables/feed-item.ts diff --git a/packages/bsky/src/db/tables/follow.ts b/packages/bsky/src/data-plane/server/db/tables/follow.ts 
similarity index 100% rename from packages/bsky/src/db/tables/follow.ts rename to packages/bsky/src/data-plane/server/db/tables/follow.ts diff --git a/packages/bsky/src/db/tables/label.ts b/packages/bsky/src/data-plane/server/db/tables/label.ts similarity index 100% rename from packages/bsky/src/db/tables/label.ts rename to packages/bsky/src/data-plane/server/db/tables/label.ts diff --git a/packages/bsky/src/db/tables/like.ts b/packages/bsky/src/data-plane/server/db/tables/like.ts similarity index 100% rename from packages/bsky/src/db/tables/like.ts rename to packages/bsky/src/data-plane/server/db/tables/like.ts diff --git a/packages/bsky/src/db/tables/list-block.ts b/packages/bsky/src/data-plane/server/db/tables/list-block.ts similarity index 100% rename from packages/bsky/src/db/tables/list-block.ts rename to packages/bsky/src/data-plane/server/db/tables/list-block.ts diff --git a/packages/bsky/src/db/tables/list-item.ts b/packages/bsky/src/data-plane/server/db/tables/list-item.ts similarity index 100% rename from packages/bsky/src/db/tables/list-item.ts rename to packages/bsky/src/data-plane/server/db/tables/list-item.ts diff --git a/packages/bsky/src/db/tables/list-mute.ts b/packages/bsky/src/data-plane/server/db/tables/list-mute.ts similarity index 100% rename from packages/bsky/src/db/tables/list-mute.ts rename to packages/bsky/src/data-plane/server/db/tables/list-mute.ts diff --git a/packages/bsky/src/db/tables/list.ts b/packages/bsky/src/data-plane/server/db/tables/list.ts similarity index 100% rename from packages/bsky/src/db/tables/list.ts rename to packages/bsky/src/data-plane/server/db/tables/list.ts diff --git a/packages/bsky/src/db/tables/moderation.ts b/packages/bsky/src/data-plane/server/db/tables/moderation.ts similarity index 100% rename from packages/bsky/src/db/tables/moderation.ts rename to packages/bsky/src/data-plane/server/db/tables/moderation.ts diff --git a/packages/bsky/src/db/tables/mute.ts 
b/packages/bsky/src/data-plane/server/db/tables/mute.ts similarity index 100% rename from packages/bsky/src/db/tables/mute.ts rename to packages/bsky/src/data-plane/server/db/tables/mute.ts diff --git a/packages/bsky/src/db/tables/notification-push-token.ts b/packages/bsky/src/data-plane/server/db/tables/notification-push-token.ts similarity index 100% rename from packages/bsky/src/db/tables/notification-push-token.ts rename to packages/bsky/src/data-plane/server/db/tables/notification-push-token.ts diff --git a/packages/bsky/src/db/tables/notification.ts b/packages/bsky/src/data-plane/server/db/tables/notification.ts similarity index 100% rename from packages/bsky/src/db/tables/notification.ts rename to packages/bsky/src/data-plane/server/db/tables/notification.ts diff --git a/packages/bsky/src/db/tables/post-agg.ts b/packages/bsky/src/data-plane/server/db/tables/post-agg.ts similarity index 100% rename from packages/bsky/src/db/tables/post-agg.ts rename to packages/bsky/src/data-plane/server/db/tables/post-agg.ts diff --git a/packages/bsky/src/db/tables/post-embed.ts b/packages/bsky/src/data-plane/server/db/tables/post-embed.ts similarity index 100% rename from packages/bsky/src/db/tables/post-embed.ts rename to packages/bsky/src/data-plane/server/db/tables/post-embed.ts diff --git a/packages/bsky/src/db/tables/post.ts b/packages/bsky/src/data-plane/server/db/tables/post.ts similarity index 100% rename from packages/bsky/src/db/tables/post.ts rename to packages/bsky/src/data-plane/server/db/tables/post.ts diff --git a/packages/bsky/src/db/tables/profile-agg.ts b/packages/bsky/src/data-plane/server/db/tables/profile-agg.ts similarity index 100% rename from packages/bsky/src/db/tables/profile-agg.ts rename to packages/bsky/src/data-plane/server/db/tables/profile-agg.ts diff --git a/packages/bsky/src/db/tables/profile.ts b/packages/bsky/src/data-plane/server/db/tables/profile.ts similarity index 100% rename from packages/bsky/src/db/tables/profile.ts rename to 
packages/bsky/src/data-plane/server/db/tables/profile.ts diff --git a/packages/bsky/src/db/tables/record.ts b/packages/bsky/src/data-plane/server/db/tables/record.ts similarity index 100% rename from packages/bsky/src/db/tables/record.ts rename to packages/bsky/src/data-plane/server/db/tables/record.ts diff --git a/packages/bsky/src/db/tables/repost.ts b/packages/bsky/src/data-plane/server/db/tables/repost.ts similarity index 100% rename from packages/bsky/src/db/tables/repost.ts rename to packages/bsky/src/data-plane/server/db/tables/repost.ts diff --git a/packages/bsky/src/db/tables/subscription.ts b/packages/bsky/src/data-plane/server/db/tables/subscription.ts similarity index 100% rename from packages/bsky/src/db/tables/subscription.ts rename to packages/bsky/src/data-plane/server/db/tables/subscription.ts diff --git a/packages/bsky/src/db/tables/suggested-feed.ts b/packages/bsky/src/data-plane/server/db/tables/suggested-feed.ts similarity index 100% rename from packages/bsky/src/db/tables/suggested-feed.ts rename to packages/bsky/src/data-plane/server/db/tables/suggested-feed.ts diff --git a/packages/bsky/src/db/tables/suggested-follow.ts b/packages/bsky/src/data-plane/server/db/tables/suggested-follow.ts similarity index 100% rename from packages/bsky/src/db/tables/suggested-follow.ts rename to packages/bsky/src/data-plane/server/db/tables/suggested-follow.ts diff --git a/packages/bsky/src/db/tables/thread-gate.ts b/packages/bsky/src/data-plane/server/db/tables/thread-gate.ts similarity index 100% rename from packages/bsky/src/db/tables/thread-gate.ts rename to packages/bsky/src/data-plane/server/db/tables/thread-gate.ts diff --git a/packages/bsky/src/db/tables/view-param.ts b/packages/bsky/src/data-plane/server/db/tables/view-param.ts similarity index 100% rename from packages/bsky/src/db/tables/view-param.ts rename to packages/bsky/src/data-plane/server/db/tables/view-param.ts diff --git a/packages/bsky/src/db/types.ts 
b/packages/bsky/src/data-plane/server/db/types.ts similarity index 100% rename from packages/bsky/src/db/types.ts rename to packages/bsky/src/data-plane/server/db/types.ts diff --git a/packages/bsky/src/db/util.ts b/packages/bsky/src/data-plane/server/db/util.ts similarity index 100% rename from packages/bsky/src/db/util.ts rename to packages/bsky/src/data-plane/server/db/util.ts diff --git a/packages/bsky/src/did-cache.ts b/packages/bsky/src/data-plane/server/did-cache.ts similarity index 98% rename from packages/bsky/src/did-cache.ts rename to packages/bsky/src/data-plane/server/did-cache.ts index e08b09ca7e7..3fed82df5ed 100644 --- a/packages/bsky/src/did-cache.ts +++ b/packages/bsky/src/data-plane/server/did-cache.ts @@ -2,7 +2,7 @@ import PQueue from 'p-queue' import { CacheResult, DidCache, DidDocument } from '@atproto/identity' import { PrimaryDatabase } from './db' import { excluded } from './db/util' -import { dbLogger } from './logger' +import { dbLogger } from '../../logger' export class DidSqlCache implements DidCache { public pQueue: PQueue | null //null during teardown diff --git a/packages/bsky/src/services/indexing/index.ts b/packages/bsky/src/data-plane/server/indexing/index.ts similarity index 85% rename from packages/bsky/src/services/indexing/index.ts rename to packages/bsky/src/data-plane/server/indexing/index.ts index 44dd9c3c986..e18f01bdb2a 100644 --- a/packages/bsky/src/services/indexing/index.ts +++ b/packages/bsky/src/data-plane/server/indexing/index.ts @@ -13,7 +13,8 @@ import { AtUri } from '@atproto/syntax' import { IdResolver, getPds } from '@atproto/identity' import { DAY, HOUR } from '@atproto/common' import { ValidationError } from '@atproto/lexicon' -import { PrimaryDatabase } from '../../db' +import { PrimaryDatabase } from '../db' +import { Actor } from '../db/tables/actor' import * as Post from './plugins/post' import * as Threadgate from './plugins/thread-gate' import * as Like from './plugins/like' @@ -26,12 +27,8 @@ import * 
as ListBlock from './plugins/list-block' import * as Block from './plugins/block' import * as FeedGenerator from './plugins/feed-generator' import RecordProcessor from './processor' -import { subLogger } from '../../logger' -import { retryHttp } from '../../util/retry' -import { BackgroundQueue } from '../../background' -import { NotificationServer } from '../../notifications' -import { AutoModerator } from '../../auto-moderator' -import { Actor } from '../../db/tables/actor' +import { subLogger } from '../../../logger' +import { retryHttp } from '../../../util/retry' export class IndexingService { records: { @@ -48,51 +45,29 @@ export class IndexingService { feedGenerator: FeedGenerator.PluginType } - constructor( - public db: PrimaryDatabase, - public idResolver: IdResolver, - public autoMod: AutoModerator, - public backgroundQueue: BackgroundQueue, - public notifServer?: NotificationServer, - ) { + constructor(public db: PrimaryDatabase, public idResolver: IdResolver) { this.records = { - post: Post.makePlugin(this.db, backgroundQueue, notifServer), - threadGate: Threadgate.makePlugin(this.db, backgroundQueue, notifServer), - like: Like.makePlugin(this.db, backgroundQueue, notifServer), - repost: Repost.makePlugin(this.db, backgroundQueue, notifServer), - follow: Follow.makePlugin(this.db, backgroundQueue, notifServer), - profile: Profile.makePlugin(this.db, backgroundQueue, notifServer), - list: List.makePlugin(this.db, backgroundQueue, notifServer), - listItem: ListItem.makePlugin(this.db, backgroundQueue, notifServer), - listBlock: ListBlock.makePlugin(this.db, backgroundQueue, notifServer), - block: Block.makePlugin(this.db, backgroundQueue, notifServer), - feedGenerator: FeedGenerator.makePlugin( - this.db, - backgroundQueue, - notifServer, - ), + post: Post.makePlugin(this.db), + threadGate: Threadgate.makePlugin(this.db), + like: Like.makePlugin(this.db), + repost: Repost.makePlugin(this.db), + follow: Follow.makePlugin(this.db), + profile: 
Profile.makePlugin(this.db), + list: List.makePlugin(this.db), + listItem: ListItem.makePlugin(this.db), + listBlock: ListBlock.makePlugin(this.db), + block: Block.makePlugin(this.db), + feedGenerator: FeedGenerator.makePlugin(this.db), } } transact(txn: PrimaryDatabase) { txn.assertTransaction() - return new IndexingService( - txn, - this.idResolver, - this.autoMod, - this.backgroundQueue, - this.notifServer, - ) + return new IndexingService(txn, this.idResolver) } - static creator( - idResolver: IdResolver, - autoMod: AutoModerator, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, - ) { - return (db: PrimaryDatabase) => - new IndexingService(db, idResolver, autoMod, backgroundQueue, notifServer) + static creator(idResolver: IdResolver) { + return (db: PrimaryDatabase) => new IndexingService(db, idResolver) } async indexRecord( @@ -114,9 +89,6 @@ export class IndexingService { await indexer.updateRecord(uri, cid, obj, timestamp) } }) - if (!opts?.disableLabels) { - this.autoMod.processRecord(uri, cid, obj) - } } async deleteRecord(uri: AtUri, cascading = false) { @@ -170,10 +142,6 @@ export class IndexingService { .onConflict((oc) => oc.column('did').doUpdateSet(actorInfo)) .returning('did') .executeTakeFirst() - - if (handle) { - this.autoMod.processHandle(handle, did) - } } async indexRepo(did: string, commit?: string) { diff --git a/packages/bsky/src/services/indexing/plugins/block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts similarity index 75% rename from packages/bsky/src/services/indexing/plugins/block.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/block.ts index 88e62b6f5ac..495c5be6c93 100644 --- a/packages/bsky/src/services/indexing/plugins/block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts @@ -1,13 +1,11 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as Block 
from '../../../lexicon/types/app/bsky/graph/block' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Block from '../../../../lexicon/types/app/bsky/graph/block' +import * as lex from '../../../../lexicon/lexicons' +import { PrimaryDatabase } from '../../db' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' const lexId = lex.ids.AppBskyGraphBlock type IndexedBlock = Selectable @@ -71,12 +69,8 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/feed-generator.ts b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts similarity index 75% rename from packages/bsky/src/services/indexing/plugins/feed-generator.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts index be5435966f1..f2876eef053 100644 --- a/packages/bsky/src/services/indexing/plugins/feed-generator.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts @@ -1,13 +1,11 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as FeedGenerator from '../../../lexicon/types/app/bsky/feed/generator' -import * as lex from '../../../lexicon/lexicons' -import { PrimaryDatabase } from '../../../db' -import { 
DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' -import { BackgroundQueue } from '../../../background' +import * as FeedGenerator from '../../../../lexicon/types/app/bsky/feed/generator' +import * as lex from '../../../../lexicon/lexicons' +import { PrimaryDatabase } from '../../db' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { NotificationServer } from '../../../notifications' const lexId = lex.ids.AppBskyFeedGenerator type IndexedFeedGenerator = Selectable @@ -70,12 +68,8 @@ export type PluginType = RecordProcessor< IndexedFeedGenerator > -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/follow.ts b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts similarity index 83% rename from packages/bsky/src/services/indexing/plugins/follow.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/follow.ts index 8655c7eba71..dfdfbdb9631 100644 --- a/packages/bsky/src/services/indexing/plugins/follow.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts @@ -1,14 +1,12 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as Follow from '../../../lexicon/types/app/bsky/graph/follow' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Follow from '../../../../lexicon/types/app/bsky/graph/follow' +import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import 
{ PrimaryDatabase } from '../../../db' -import { countAll, excluded } from '../../../db/util' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' +import { PrimaryDatabase } from '../../db' +import { countAll, excluded } from '../../db/util' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' const lexId = lex.ids.AppBskyGraphFollow type IndexedFollow = Selectable @@ -118,12 +116,8 @@ const updateAggregates = async (db: DatabaseSchema, follow: IndexedFollow) => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/like.ts b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts similarity index 81% rename from packages/bsky/src/services/indexing/plugins/like.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/like.ts index 703800f67c8..431d76a8656 100644 --- a/packages/bsky/src/services/indexing/plugins/like.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts @@ -1,14 +1,12 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as Like from '../../../lexicon/types/app/bsky/feed/like' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Like from '../../../../lexicon/types/app/bsky/feed/like' +import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import { countAll, excluded } from '../../../db/util' -import { PrimaryDatabase } from 
'../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' +import { countAll, excluded } from '../../db/util' +import { PrimaryDatabase } from '../../db' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' const lexId = lex.ids.AppBskyFeedLike type IndexedLike = Selectable @@ -108,12 +106,8 @@ const updateAggregates = async (db: DatabaseSchema, like: IndexedLike) => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/list-block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts similarity index 75% rename from packages/bsky/src/services/indexing/plugins/list-block.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts index 3040f1aa3f9..3de8889808f 100644 --- a/packages/bsky/src/services/indexing/plugins/list-block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts @@ -1,13 +1,11 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as ListBlock from '../../../lexicon/types/app/bsky/graph/listblock' -import * as lex from '../../../lexicon/lexicons' -import { PrimaryDatabase } from '../../../db' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as ListBlock from '../../../../lexicon/types/app/bsky/graph/listblock' +import * as lex from '../../../../lexicon/lexicons' +import { PrimaryDatabase } from '../../db' +import { DatabaseSchema, DatabaseSchemaType } from 
'../../db/database-schema' import RecordProcessor from '../processor' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' const lexId = lex.ids.AppBskyGraphListblock type IndexedListBlock = Selectable @@ -71,12 +69,8 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/list-item.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts similarity index 78% rename from packages/bsky/src/services/indexing/plugins/list-item.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts index 9e08145b23e..778fa599b62 100644 --- a/packages/bsky/src/services/indexing/plugins/list-item.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts @@ -1,14 +1,12 @@ import { Selectable } from 'kysely' +import { InvalidRequestError } from '@atproto/xrpc-server' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as ListItem from '../../../lexicon/types/app/bsky/graph/listitem' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as ListItem from '../../../../lexicon/types/app/bsky/graph/listitem' +import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { PrimaryDatabase } from '../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' 
+import { PrimaryDatabase } from '../../db' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' const lexId = lex.ids.AppBskyGraphListitem type IndexedListItem = Selectable @@ -79,12 +77,8 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/list.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts similarity index 74% rename from packages/bsky/src/services/indexing/plugins/list.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/list.ts index 0d078572501..09b04834293 100644 --- a/packages/bsky/src/services/indexing/plugins/list.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts @@ -1,13 +1,11 @@ import { Selectable } from 'kysely' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as List from '../../../lexicon/types/app/bsky/graph/list' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as List from '../../../../lexicon/types/app/bsky/graph/list' +import * as lex from '../../../../lexicon/lexicons' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' +import { PrimaryDatabase } from '../../db' const lexId = lex.ids.AppBskyGraphList type IndexedList = Selectable @@ -67,12 +65,8 @@ const 
notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/post.ts b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts similarity index 89% rename from packages/bsky/src/services/indexing/plugins/post.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/post.ts index af581b3bdff..bb162f139b3 100644 --- a/packages/bsky/src/services/indexing/plugins/post.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts @@ -5,27 +5,29 @@ import { jsonStringToLex } from '@atproto/lexicon' import { Record as PostRecord, ReplyRef, -} from '../../../lexicon/types/app/bsky/feed/post' -import { Record as GateRecord } from '../../../lexicon/types/app/bsky/feed/threadgate' -import { isMain as isEmbedImage } from '../../../lexicon/types/app/bsky/embed/images' -import { isMain as isEmbedExternal } from '../../../lexicon/types/app/bsky/embed/external' -import { isMain as isEmbedRecord } from '../../../lexicon/types/app/bsky/embed/record' -import { isMain as isEmbedRecordWithMedia } from '../../../lexicon/types/app/bsky/embed/recordWithMedia' +} from '../../../../lexicon/types/app/bsky/feed/post' +import { Record as GateRecord } from '../../../../lexicon/types/app/bsky/feed/threadgate' +import { isMain as isEmbedImage } from '../../../../lexicon/types/app/bsky/embed/images' +import { isMain as isEmbedExternal } from '../../../../lexicon/types/app/bsky/embed/external' +import { isMain as isEmbedRecord } from '../../../../lexicon/types/app/bsky/embed/record' +import { isMain as isEmbedRecordWithMedia } from 
'../../../../lexicon/types/app/bsky/embed/recordWithMedia' import { isMention, isLink, -} from '../../../lexicon/types/app/bsky/richtext/facet' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +} from '../../../../lexicon/types/app/bsky/richtext/facet' +import * as lex from '../../../../lexicon/lexicons' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { Notification } from '../../../db/tables/notification' -import { PrimaryDatabase } from '../../../db' -import { countAll, excluded } from '../../../db/util' -import { BackgroundQueue } from '../../../background' -import { getAncestorsAndSelfQb, getDescendentsQb } from '../../util/post' -import { NotificationServer } from '../../../notifications' -import * as feedutil from '../../feed/util' -import { postToThreadgateUri } from '../../feed/util' +import { Notification } from '../../db/tables/notification' +import { PrimaryDatabase } from '../../db' +import { countAll, excluded } from '../../db/util' +import { + getAncestorsAndSelfQb, + getDescendentsQb, + invalidReplyRoot as checkInvalidReplyRoot, + violatesThreadGate as checkViolatesThreadGate, + postToThreadgateUri, +} from '../../util' type Notif = Insertable type Post = Selectable @@ -391,12 +393,8 @@ const updateAggregates = async (db: DatabaseSchema, postIdx: IndexedPost) => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, @@ -427,9 +425,9 @@ async function validateReply( const replyRefs = await getReplyRefs(db, reply) // check reply const invalidReplyRoot = - 
!replyRefs.parent || feedutil.invalidReplyRoot(reply, replyRefs.parent) + !replyRefs.parent || checkInvalidReplyRoot(reply, replyRefs.parent) // check interaction - const violatesThreadGate = await feedutil.violatesThreadGate( + const violatesThreadGate = await checkViolatesThreadGate( db, creator, new AtUri(reply.root.uri).hostname, diff --git a/packages/bsky/src/services/indexing/plugins/profile.ts b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts similarity index 73% rename from packages/bsky/src/services/indexing/plugins/profile.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/profile.ts index ea0c8f07f98..9ea12135e30 100644 --- a/packages/bsky/src/services/indexing/plugins/profile.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts @@ -1,12 +1,10 @@ import { AtUri } from '@atproto/syntax' import { CID } from 'multiformats/cid' -import * as Profile from '../../../lexicon/types/app/bsky/actor/profile' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Profile from '../../../../lexicon/types/app/bsky/actor/profile' +import * as lex from '../../../../lexicon/lexicons' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' +import { PrimaryDatabase } from '../../db' const lexId = lex.ids.AppBskyActorProfile type IndexedProfile = DatabaseSchemaType['profile'] @@ -63,12 +61,8 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: 
PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/repost.ts b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts similarity index 84% rename from packages/bsky/src/services/indexing/plugins/repost.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/repost.ts index ea8d517dc52..f31f537daf3 100644 --- a/packages/bsky/src/services/indexing/plugins/repost.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts @@ -1,14 +1,12 @@ import { Selectable } from 'kysely' import { CID } from 'multiformats/cid' import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' -import * as Repost from '../../../lexicon/types/app/bsky/feed/repost' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Repost from '../../../../lexicon/types/app/bsky/feed/repost' +import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../../db' -import { countAll, excluded } from '../../../db/util' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' +import { PrimaryDatabase } from '../../db' +import { countAll, excluded } from '../../db/util' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' const lexId = lex.ids.AppBskyFeedRepost type IndexedRepost = Selectable @@ -133,12 +131,8 @@ const updateAggregates = async (db: DatabaseSchema, repost: IndexedRepost) => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new 
RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/plugins/thread-gate.ts b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts similarity index 77% rename from packages/bsky/src/services/indexing/plugins/thread-gate.ts rename to packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts index 9a58547f2da..80596a05080 100644 --- a/packages/bsky/src/services/indexing/plugins/thread-gate.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts @@ -1,13 +1,11 @@ import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { InvalidRequestError } from '@atproto/xrpc-server' import { CID } from 'multiformats/cid' -import * as Threadgate from '../../../lexicon/types/app/bsky/feed/threadgate' -import * as lex from '../../../lexicon/lexicons' -import { DatabaseSchema, DatabaseSchemaType } from '../../../db/database-schema' +import * as Threadgate from '../../../../lexicon/types/app/bsky/feed/threadgate' +import * as lex from '../../../../lexicon/lexicons' +import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' +import { PrimaryDatabase } from '../../db' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../../db' -import { BackgroundQueue } from '../../../background' -import { NotificationServer } from '../../../notifications' const lexId = lex.ids.AppBskyFeedThreadgate type IndexedGate = DatabaseSchemaType['thread_gate'] @@ -76,12 +74,8 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = ( - db: PrimaryDatabase, - backgroundQueue: BackgroundQueue, - notifServer?: NotificationServer, -): PluginType => { - return new RecordProcessor(db, backgroundQueue, notifServer, { +export const makePlugin = (db: PrimaryDatabase): PluginType => { + return new RecordProcessor(db, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/services/indexing/processor.ts 
b/packages/bsky/src/data-plane/server/indexing/processor.ts similarity index 78% rename from packages/bsky/src/services/indexing/processor.ts rename to packages/bsky/src/data-plane/server/indexing/processor.ts index 2a02c61125e..1dc540444d8 100644 --- a/packages/bsky/src/services/indexing/processor.ts +++ b/packages/bsky/src/data-plane/server/indexing/processor.ts @@ -1,15 +1,12 @@ import { Insertable } from 'kysely' import { CID } from 'multiformats/cid' import { AtUri } from '@atproto/syntax' -import { jsonStringToLex, stringifyLex } from '@atproto/lexicon' -import DatabaseSchema from '../../db/database-schema' -import { lexicons } from '../../lexicon/lexicons' -import { Notification } from '../../db/tables/notification' import { chunkArray } from '@atproto/common' -import { PrimaryDatabase } from '../../db' -import { BackgroundQueue } from '../../background' -import { NotificationServer } from '../../notifications' -import { dbLogger } from '../../logger' +import { jsonStringToLex, stringifyLex } from '@atproto/lexicon' +import { lexicons } from '../../../lexicon/lexicons' +import { PrimaryDatabase } from '../db' +import DatabaseSchema from '../db/database-schema' +import { Notification } from '../db/tables/notification' // @NOTE re: insertions and deletions. Due to how record updates are handled, // (insertFn) should have the same effect as (insertFn -> deleteFn -> insertFn). 
@@ -43,8 +40,6 @@ export class RecordProcessor { db: DatabaseSchema constructor( private appDb: PrimaryDatabase, - private backgroundQueue: BackgroundQueue, - private notifServer: NotificationServer | undefined, private params: RecordProcessorParams, ) { this.db = appDb.db @@ -91,7 +86,7 @@ export class RecordProcessor { timestamp, ) if (inserted) { - this.aggregateOnCommit(inserted) + await this.aggregateOnCommit(inserted) if (!opts?.disableNotifs) { await this.handleNotifs({ inserted }) } @@ -158,7 +153,7 @@ export class RecordProcessor { // If a record was updated but hadn't been indexed yet, treat it like a plain insert. return this.insertRecord(uri, cid, obj, timestamp) } - this.aggregateOnCommit(deleted) + await this.aggregateOnCommit(deleted) const inserted = await this.params.insertFn( this.db, uri, @@ -171,7 +166,7 @@ export class RecordProcessor { 'Record update failed: removed from index but could not be replaced', ) } - this.aggregateOnCommit(inserted) + await this.aggregateOnCommit(inserted) if (!opts?.disableNotifs) { await this.handleNotifs({ inserted, deleted }) } @@ -188,7 +183,7 @@ export class RecordProcessor { .execute() const deleted = await this.params.deleteFn(this.db, uri) if (!deleted) return - this.aggregateOnCommit(deleted) + await this.aggregateOnCommit(deleted) if (cascading) { await this.db .deleteFrom('duplicate_record') @@ -220,7 +215,7 @@ export class RecordProcessor { found.indexedAt, ) if (inserted) { - this.aggregateOnCommit(inserted) + await this.aggregateOnCommit(inserted) } await this.handleNotifs({ deleted, inserted: inserted ?? 
undefined }) } @@ -229,7 +224,6 @@ export class RecordProcessor { async handleNotifs(op: { deleted?: S; inserted?: S }) { let notifs: Notif[] = [] const runOnCommit: ((db: PrimaryDatabase) => Promise)[] = [] - const sendOnCommit: (() => Promise)[] = [] if (op.deleted) { const forDelete = this.params.notifsForDelete( op.deleted, @@ -253,46 +247,16 @@ export class RecordProcessor { runOnCommit.push(async (db) => { await db.db.insertInto('notification').values(chunk).execute() }) - if (this.notifServer) { - const notifServer = this.notifServer - sendOnCommit.push(async () => { - try { - const preparedNotifs = await notifServer.prepareNotifsToSend(chunk) - await notifServer.processNotifications(preparedNotifs) - } catch (error) { - dbLogger.error({ error }, 'error sending push notifications') - } - }) - } } - if (runOnCommit.length) { - // Need to ensure notif deletion always happens before creation, otherwise delete may clobber in a race. - this.appDb.onCommit(() => { - this.backgroundQueue.add(async (db) => { - for (const fn of runOnCommit) { - await fn(db) - } - }) - }) - } - if (sendOnCommit.length) { - // Need to ensure notif deletion always happens before creation, otherwise delete may clobber in a race. - this.appDb.onCommit(() => { - this.backgroundQueue.add(async () => { - for (const fn of sendOnCommit) { - await fn() - } - }) - }) + // Need to ensure notif deletion always happens before creation, otherwise delete may clobber in a race. 
+ for (const fn of runOnCommit) { + await fn(this.appDb) // these could be backgrounded } } - aggregateOnCommit(indexed: S) { + async aggregateOnCommit(indexed: S) { const { updateAggregates } = this.params - if (!updateAggregates) return - this.appDb.onCommit(() => { - this.backgroundQueue.add((db) => updateAggregates(db.db, indexed)) - }) + await updateAggregates?.(this.db, indexed) } } diff --git a/packages/bsky/src/data-plane/server/routes/blocks.ts b/packages/bsky/src/data-plane/server/routes/blocks.ts index 333163945dc..af39d2859cb 100644 --- a/packages/bsky/src/data-plane/server/routes/blocks.ts +++ b/packages/bsky/src/data-plane/server/routes/blocks.ts @@ -1,7 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getBidirectionalBlock(req) { diff --git a/packages/bsky/src/data-plane/server/routes/feed-gens.ts b/packages/bsky/src/data-plane/server/routes/feed-gens.ts index 2dc46a98386..5c853e129b2 100644 --- a/packages/bsky/src/data-plane/server/routes/feed-gens.ts +++ b/packages/bsky/src/data-plane/server/routes/feed-gens.ts @@ -1,7 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getActorFeeds(req) { diff --git a/packages/bsky/src/data-plane/server/routes/feeds.ts b/packages/bsky/src/data-plane/server/routes/feeds.ts index 79a01a48530..6eac5dfe4e5 100644 --- a/packages/bsky/src/data-plane/server/routes/feeds.ts +++ 
b/packages/bsky/src/data-plane/server/routes/feeds.ts @@ -1,7 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getAuthorFeed(req) { diff --git a/packages/bsky/src/data-plane/server/routes/follows.ts b/packages/bsky/src/data-plane/server/routes/follows.ts index ecab294f3c3..d9755de9064 100644 --- a/packages/bsky/src/data-plane/server/routes/follows.ts +++ b/packages/bsky/src/data-plane/server/routes/follows.ts @@ -1,8 +1,8 @@ +import { keyBy } from '@atproto/common' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' -import { keyBy } from '@atproto/common' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getActorFollowsActors(req) { diff --git a/packages/bsky/src/data-plane/server/routes/index.ts b/packages/bsky/src/data-plane/server/routes/index.ts index 4eb4203a52e..eb7c9cb57b7 100644 --- a/packages/bsky/src/data-plane/server/routes/index.ts +++ b/packages/bsky/src/data-plane/server/routes/index.ts @@ -20,7 +20,7 @@ import search from './search' import suggestions from './suggestions' import sync from './sync' import threads from './threads' -import { Database } from '../../../db' +import { Database } from '../db' export default (db: Database) => (router: ConnectRouter) => router.service(Service, { diff --git a/packages/bsky/src/data-plane/server/routes/labels.ts b/packages/bsky/src/data-plane/server/routes/labels.ts index 917c36a7ee2..4e004a8ce6c 100644 --- a/packages/bsky/src/data-plane/server/routes/labels.ts 
+++ b/packages/bsky/src/data-plane/server/routes/labels.ts @@ -1,7 +1,7 @@ +import * as ui8 from 'uint8arrays' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import * as ui8 from 'uint8arrays' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getLabels(req) { diff --git a/packages/bsky/src/data-plane/server/routes/likes.ts b/packages/bsky/src/data-plane/server/routes/likes.ts index 9765e05c401..881037dd069 100644 --- a/packages/bsky/src/data-plane/server/routes/likes.ts +++ b/packages/bsky/src/data-plane/server/routes/likes.ts @@ -1,7 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' import { keyBy } from '@atproto/common' export default (db: Database): Partial> => ({ diff --git a/packages/bsky/src/data-plane/server/routes/lists.ts b/packages/bsky/src/data-plane/server/routes/lists.ts index 401071c313e..b0bac1c7c64 100644 --- a/packages/bsky/src/data-plane/server/routes/lists.ts +++ b/packages/bsky/src/data-plane/server/routes/lists.ts @@ -1,9 +1,9 @@ +import { keyBy } from '@atproto/common' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { countAll } from '../../../db/util' -import { keyBy } from '@atproto/common' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { Database } from '../db' +import { countAll } from '../db/util' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getActorLists(req) { diff --git a/packages/bsky/src/data-plane/server/routes/moderation.ts 
b/packages/bsky/src/data-plane/server/routes/moderation.ts index 2f86aa42462..64a3c343e9f 100644 --- a/packages/bsky/src/data-plane/server/routes/moderation.ts +++ b/packages/bsky/src/data-plane/server/routes/moderation.ts @@ -1,7 +1,7 @@ +import { sql } from 'kysely' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { sql } from 'kysely' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getBlobTakedown(req) { diff --git a/packages/bsky/src/data-plane/server/routes/mutes.ts b/packages/bsky/src/data-plane/server/routes/mutes.ts index 73d9154f28f..8cb7736b85d 100644 --- a/packages/bsky/src/data-plane/server/routes/mutes.ts +++ b/packages/bsky/src/data-plane/server/routes/mutes.ts @@ -3,12 +3,8 @@ import { ServiceImpl } from '@connectrpc/connect' import { AtUri } from '@atproto/syntax' import { ids } from '../../../lexicon/lexicons' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { - CreatedAtDidKeyset, - TimeCidKeyset, - paginate, -} from '../../../db/pagination' +import { Database } from '../db' +import { CreatedAtDidKeyset, TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getActorMutesActor(req) { diff --git a/packages/bsky/src/data-plane/server/routes/notifs.ts b/packages/bsky/src/data-plane/server/routes/notifs.ts index 2609f98e436..a7809814278 100644 --- a/packages/bsky/src/data-plane/server/routes/notifs.ts +++ b/packages/bsky/src/data-plane/server/routes/notifs.ts @@ -1,10 +1,10 @@ -import { ServiceImpl } from '@connectrpc/connect' -import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { countAll, excluded, notSoftDeletedClause } from '../../../db/util' import { sql } from 'kysely' -import { TimeCidKeyset, paginate } from '../../../db/pagination' +import { ServiceImpl } from 
'@connectrpc/connect' import { Timestamp } from '@bufbuild/protobuf' +import { Service } from '../../gen/bsky_connect' +import { Database } from '../db' +import { countAll, excluded, notSoftDeletedClause } from '../db/util' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getNotifications(req) { diff --git a/packages/bsky/src/data-plane/server/routes/posts.ts b/packages/bsky/src/data-plane/server/routes/posts.ts index f9621cbce5e..5cd58dd4aa6 100644 --- a/packages/bsky/src/data-plane/server/routes/posts.ts +++ b/packages/bsky/src/data-plane/server/routes/posts.ts @@ -1,7 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' import { keyBy } from '@atproto/common' -import { Database } from '../../../db' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getPostReplyCounts(req) { diff --git a/packages/bsky/src/data-plane/server/routes/profile.ts b/packages/bsky/src/data-plane/server/routes/profile.ts index aca3f22eaea..0daa4be7960 100644 --- a/packages/bsky/src/data-plane/server/routes/profile.ts +++ b/packages/bsky/src/data-plane/server/routes/profile.ts @@ -1,8 +1,8 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' import { keyBy } from '@atproto/common' -import { Database } from '../../../db' import { getRecords } from './records' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getActors(req) { diff --git a/packages/bsky/src/data-plane/server/routes/records.ts b/packages/bsky/src/data-plane/server/routes/records.ts index 9c6c9e3a730..08b8f89b7ce 100644 --- a/packages/bsky/src/data-plane/server/routes/records.ts +++ b/packages/bsky/src/data-plane/server/routes/records.ts @@ -6,7 +6,7 @@ import * as ui8 from 'uint8arrays' import { ids } from '../../../lexicon/lexicons' import { Service } from '../../gen/bsky_connect' import { 
PostRecordMeta, Record } from '../../gen/bsky_pb' -import { Database } from '../../../db' +import { Database } from '../db' export default (db: Database): Partial> => ({ getBlockRecords: getRecords(db, ids.AppBskyGraphBlock), diff --git a/packages/bsky/src/data-plane/server/routes/relationships.ts b/packages/bsky/src/data-plane/server/routes/relationships.ts index c2e6ce33772..1a6817da275 100644 --- a/packages/bsky/src/data-plane/server/routes/relationships.ts +++ b/packages/bsky/src/data-plane/server/routes/relationships.ts @@ -1,9 +1,9 @@ +import { sql } from 'kysely' import { ServiceImpl } from '@connectrpc/connect' -import { Service } from '../../gen/bsky_connect' import { keyBy } from '@atproto/common' -import { Database } from '../../../db' -import { sql } from 'kysely' -import { valuesList } from '../../../db/util' +import { Service } from '../../gen/bsky_connect' +import { Database } from '../db' +import { valuesList } from '../db/util' export default (db: Database): Partial> => ({ async getRelationships(req) { diff --git a/packages/bsky/src/data-plane/server/routes/reposts.ts b/packages/bsky/src/data-plane/server/routes/reposts.ts index 2d9f4da1a46..f0251d0c667 100644 --- a/packages/bsky/src/data-plane/server/routes/reposts.ts +++ b/packages/bsky/src/data-plane/server/routes/reposts.ts @@ -1,8 +1,8 @@ +import { keyBy } from '@atproto/common' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { TimeCidKeyset, paginate } from '../../../db/pagination' -import { keyBy } from '@atproto/common' +import { Database } from '../db' +import { TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async getRepostsBySubject(req) { diff --git a/packages/bsky/src/data-plane/server/routes/search.ts b/packages/bsky/src/data-plane/server/routes/search.ts index 101856553cd..8ab15b93b5a 100644 --- 
a/packages/bsky/src/data-plane/server/routes/search.ts +++ b/packages/bsky/src/data-plane/server/routes/search.ts @@ -1,11 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { - IndexedAtDidKeyset, - TimeCidKeyset, - paginate, -} from '../../../db/pagination' +import { Database } from '../db' +import { IndexedAtDidKeyset, TimeCidKeyset, paginate } from '../db/pagination' export default (db: Database): Partial> => ({ async searchActors(req) { @@ -13,7 +9,7 @@ export default (db: Database): Partial> => ({ const { ref } = db.db.dynamic let builder = db.db .selectFrom('actor') - .where('actor.handle', 'like', `%${term}%`) + .where('actor.handle', 'like', `%${cleanQuery(term)}%`) .selectAll() const keyset = new IndexedAtDidKeyset( @@ -58,3 +54,6 @@ export default (db: Database): Partial> => ({ } }, }) + +// Remove leading @ in case a handle is input that way +const cleanQuery = (query: string) => query.trim().replace(/^@/g, '') diff --git a/packages/bsky/src/data-plane/server/routes/suggestions.ts b/packages/bsky/src/data-plane/server/routes/suggestions.ts index b1f80007b62..1fd2042ac1c 100644 --- a/packages/bsky/src/data-plane/server/routes/suggestions.ts +++ b/packages/bsky/src/data-plane/server/routes/suggestions.ts @@ -1,7 +1,7 @@ import { sql } from 'kysely' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getFollowSuggestions(req) { diff --git a/packages/bsky/src/data-plane/server/routes/sync.ts b/packages/bsky/src/data-plane/server/routes/sync.ts index 11d8e87b746..8a9befb0059 100644 --- a/packages/bsky/src/data-plane/server/routes/sync.ts +++ b/packages/bsky/src/data-plane/server/routes/sync.ts @@ -1,6 +1,6 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from 
'../../gen/bsky_connect' -import { Database } from '../../../db' +import { Database } from '../db' export default (db: Database): Partial> => ({ async getLatestRev(req) { diff --git a/packages/bsky/src/data-plane/server/routes/threads.ts b/packages/bsky/src/data-plane/server/routes/threads.ts index 186c151b97e..c3e061d2b67 100644 --- a/packages/bsky/src/data-plane/server/routes/threads.ts +++ b/packages/bsky/src/data-plane/server/routes/threads.ts @@ -1,10 +1,7 @@ import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' -import { Database } from '../../../db' -import { - getAncestorsAndSelfQb, - getDescendentsQb, -} from '../../../services/util/post' +import { Database } from '../db' +import { getAncestorsAndSelfQb, getDescendentsQb } from '../util' export default (db: Database): Partial> => ({ async getThread(req) { diff --git a/packages/bsky/src/services/feed/util.ts b/packages/bsky/src/data-plane/server/util.ts similarity index 55% rename from packages/bsky/src/services/feed/util.ts rename to packages/bsky/src/data-plane/server/util.ts index 83b5e59d705..d15b7ffa518 100644 --- a/packages/bsky/src/services/feed/util.ts +++ b/packages/bsky/src/data-plane/server/util.ts @@ -1,19 +1,77 @@ import { sql } from 'kysely' import { AtUri } from '@atproto/syntax' +import { ids } from '../../lexicon/lexicons' import { Record as PostRecord, ReplyRef, } from '../../lexicon/types/app/bsky/feed/post' -import { - Record as GateRecord, - isFollowingRule, - isListRule, - isMentionRule, -} from '../../lexicon/types/app/bsky/feed/threadgate' -import { isMention } from '../../lexicon/types/app/bsky/richtext/facet' -import { valuesList } from '../../db/util' -import DatabaseSchema from '../../db/database-schema' -import { ids } from '../../lexicon/lexicons' +import { Record as GateRecord } from '../../lexicon/types/app/bsky/feed/threadgate' +import DatabaseSchema from './db/database-schema' +import { valuesList } from './db/util' +import { 
parseThreadGate } from '../../views/util' + +export const getDescendentsQb = ( + db: DatabaseSchema, + opts: { + uri: string + depth: number // required, protects against cycles + }, +) => { + const { uri, depth } = opts + const query = db.withRecursive('descendent(uri, depth)', (cte) => { + return cte + .selectFrom('post') + .select(['post.uri as uri', sql`1`.as('depth')]) + .where(sql`1`, '<=', depth) + .where('replyParent', '=', uri) + .unionAll( + cte + .selectFrom('post') + .innerJoin('descendent', 'descendent.uri', 'post.replyParent') + .where('descendent.depth', '<', depth) + .select([ + 'post.uri as uri', + sql`descendent.depth + 1`.as('depth'), + ]), + ) + }) + return query +} + +export const getAncestorsAndSelfQb = ( + db: DatabaseSchema, + opts: { + uri: string + parentHeight: number // required, protects against cycles + }, +) => { + const { uri, parentHeight } = opts + const query = db.withRecursive( + 'ancestor(uri, ancestorUri, height)', + (cte) => { + return cte + .selectFrom('post') + .select([ + 'post.uri as uri', + 'post.replyParent as ancestorUri', + sql`0`.as('height'), + ]) + .where('uri', '=', uri) + .unionAll( + cte + .selectFrom('post') + .innerJoin('ancestor', 'ancestor.ancestorUri', 'post.uri') + .where('ancestor.height', '<', parentHeight) + .select([ + 'post.uri as uri', + 'post.replyParent as ancestorUri', + sql`ancestor.height + 1`.as('height'), + ]), + ) + }, + ) + return query +} export const invalidReplyRoot = ( reply: ReplyRef, @@ -35,46 +93,6 @@ export const invalidReplyRoot = ( // replying to a reply: ensure the parent is a reply for the same root post return parent.record.reply?.root.uri !== replyRoot } - -type ParsedThreadGate = { - canReply?: boolean - allowMentions?: boolean - allowFollowing?: boolean - allowListUris?: string[] -} - -export const parseThreadGate = ( - replierDid: string, - ownerDid: string, - rootPost: PostRecord | null, - gate: GateRecord | null, -): ParsedThreadGate => { - if (replierDid === ownerDid) { - 
return { canReply: true } - } - // if gate.allow is unset then *any* reply is allowed, if it is an empty array then *no* reply is allowed - if (!gate || !gate.allow) { - return { canReply: true } - } - - const allowMentions = !!gate.allow.find(isMentionRule) - const allowFollowing = !!gate.allow.find(isFollowingRule) - const allowListUris = gate.allow?.filter(isListRule).map((item) => item.list) - - // check mentions first since it's quick and synchronous - if (allowMentions) { - const isMentioned = rootPost?.facets?.some((facet) => { - return facet.features.some( - (item) => isMention(item) && item.did === replierDid, - ) - }) - if (isMentioned) { - return { canReply: true, allowMentions, allowFollowing, allowListUris } - } - } - return { allowMentions, allowFollowing, allowListUris } -} - export const violatesThreadGate = async ( db: DatabaseSchema, replierDid: string, @@ -132,9 +150,3 @@ export const postToThreadgateUri = (postUri: string) => { gateUri.collection = ids.AppBskyFeedThreadgate return gateUri.toString() } - -export const threadgateToPostUri = (gateUri: string) => { - const postUri = new AtUri(gateUri) - postUri.collection = ids.AppBskyFeedPost - return postUri.toString() -} diff --git a/packages/bsky/src/db/leader.ts b/packages/bsky/src/db/leader.ts deleted file mode 100644 index ebd44bf98d6..00000000000 --- a/packages/bsky/src/db/leader.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { PoolClient } from 'pg' -import PrimaryDatabase from './primary' - -export class Leader { - session: Session | null = null - constructor(public id: number, public db: PrimaryDatabase) {} - - async run( - task: (ctx: { signal: AbortSignal }) => Promise, - ): Promise> { - const session = await this.lock() - if (!session) return { ran: false } - try { - const result = await task({ signal: session.abortController.signal }) - return { ran: true, result } - } finally { - this.release() - } - } - - destroy(err?: Error) { - this.session?.abortController.abort(err) - } - - private 
async lock(): Promise { - if (this.session) { - return null - } - - // Postgres implementation uses advisory locking, automatically released by ending connection. - - const client = await this.db.pool.connect() - try { - const lock = await client.query( - 'SELECT pg_try_advisory_lock($1) as acquired', - [this.id], - ) - if (!lock.rows[0].acquired) { - client.release() - return null - } - } catch (err) { - client.release(true) - throw err - } - - const abortController = new AbortController() - client.once('error', (err) => abortController.abort(err)) - this.session = { abortController, client } - return this.session - } - - private release() { - // The flag ensures the connection is destroyed on release, not reused. - // This is required, as that is how the pg advisory lock is released. - this.session?.client.release(true) - this.session = null - } -} - -type Session = { abortController: AbortController; client: PoolClient } - -type RunResult = { ran: false } | { ran: true; result: T } diff --git a/packages/bsky/src/db/views.ts b/packages/bsky/src/db/views.ts deleted file mode 100644 index d5aa9941436..00000000000 --- a/packages/bsky/src/db/views.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { jitter, wait } from '@atproto/common' -import { Leader } from './leader' -import { dbLogger } from '../logger' -import { PrimaryDatabase } from '.' - -export const VIEW_MAINTAINER_ID = 1010 -const VIEWS = ['algo_whats_hot_view'] - -export class ViewMaintainer { - leader = new Leader(VIEW_MAINTAINER_ID, this.db) - destroyed = false - - // @NOTE the db must be authed as the owner of the materialized view, per postgres. 
- constructor(public db: PrimaryDatabase, public intervalSec = 60) {} - - async run() { - while (!this.destroyed) { - try { - const { ran } = await this.leader.run(async ({ signal }) => { - await this.db.maintainMaterializedViews({ - signal, - views: VIEWS, - intervalSec: this.intervalSec, - }) - }) - if (ran && !this.destroyed) { - throw new Error('View maintainer completed, but should be persistent') - } - } catch (err) { - dbLogger.error( - { - err, - views: VIEWS, - intervalSec: this.intervalSec, - lockId: VIEW_MAINTAINER_ID, - }, - 'view maintainer errored', - ) - } - if (!this.destroyed) { - await wait(10000 + jitter(2000)) - } - } - } - - destroy() { - this.destroyed = true - this.leader.destroy() - } -} diff --git a/packages/bsky/src/index.ts b/packages/bsky/src/index.ts index 55ca62353f9..0b3a3853ad9 100644 --- a/packages/bsky/src/index.ts +++ b/packages/bsky/src/index.ts @@ -5,27 +5,16 @@ import events from 'events' import { createHttpTerminator, HttpTerminator } from 'http-terminator' import cors from 'cors' import compression from 'compression' -import { IdResolver } from '@atproto/identity' +import { DidCache, IdResolver } from '@atproto/identity' import API, { health, wellKnown, blobResolver } from './api' -import { DatabaseCoordinator } from './db' import * as error from './error' -import { dbLogger, loggerMiddleware } from './logger' +import { loggerMiddleware } from './logger' import { ServerConfig } from './config' import { createServer } from './lexicon' import { ImageUriBuilder } from './image/uri' import { BlobDiskCache, ImageProcessingServer } from './image/server' -import { createServices } from './services' import AppContext from './context' -import DidSqlCache from './did-cache' -import { - ImageInvalidator, - ImageProcessingServerInvalidator, -} from './image/invalidator' -import { BackgroundQueue } from './background' import { MountedAlgos } from './feed-gen/types' -import { LabelCache } from './label-cache' -import { NotificationServer } 
from './notifications' -import { AtpAgent } from '@atproto/api' import { Keypair } from '@atproto/crypto' import { createDataPlaneClient } from './data-plane/client' import { Hydrator } from './hydration/hydrator' @@ -35,16 +24,14 @@ export * from './data-plane' export type { ServerConfigValues } from './config' export type { MountedAlgos } from './feed-gen/types' export { ServerConfig } from './config' -export { Database, PrimaryDatabase, DatabaseCoordinator } from './db' -export { PeriodicModerationEventReversal } from './db/periodic-moderation-event-reversal' +export { + Database, + PrimaryDatabase, + DatabaseCoordinator, +} from './data-plane/server/db' export { Redis } from './redis' -export { ViewMaintainer } from './db/views' export { AppContext } from './context' export { makeAlgos } from './feed-gen' -export * from './daemon' -export * from './indexer' -export * from './ingester' -export { MigrateModerationData } from './migrate-moderation-data' export class BskyAppView { public ctx: AppContext @@ -59,24 +46,17 @@ export class BskyAppView { } static create(opts: { - db: DatabaseCoordinator config: ServerConfig signingKey: Keypair - imgInvalidator?: ImageInvalidator + didCache?: DidCache algos?: MountedAlgos }): BskyAppView { - const { db, config, signingKey, algos = {} } = opts - let maybeImgInvalidator = opts.imgInvalidator + const { config, signingKey, didCache, algos = {} } = opts const app = express() app.use(cors()) app.use(loggerMiddleware) app.use(compression()) - const didCache = new DidSqlCache( - db.getPrimary(), - config.didCacheStaleTTL, - config.didCacheMaxTTL, - ) const idResolver = new IdResolver({ plcUrl: config.didPlcUrl, didCache, @@ -94,51 +74,20 @@ export class BskyAppView { config, imgProcessingCache, ) - maybeImgInvalidator ??= new ImageProcessingServerInvalidator( - imgProcessingCache, - ) - } - - let imgInvalidator: ImageInvalidator - if (maybeImgInvalidator) { - imgInvalidator = maybeImgInvalidator - } else { - throw new 
Error('Missing appview image invalidator') } - const backgroundQueue = new BackgroundQueue(db.getPrimary()) - const labelCache = new LabelCache(db.getPrimary()) - const notifServer = new NotificationServer(db.getPrimary()) - const searchAgent = config.searchEndpoint - ? new AtpAgent({ service: config.searchEndpoint }) - : undefined - - const services = createServices({ - imgUriBuilder, - imgInvalidator, - labelCache, - }) - const dataplane = createDataPlaneClient(config.dataplaneUrl, '1.1') const hydrator = new Hydrator(dataplane) const views = new Views(imgUriBuilder) const ctx = new AppContext({ - db, cfg: config, - services, dataplane, hydrator, views, - imgUriBuilder, signingKey, idResolver, - didCache, - labelCache, - backgroundQueue, - searchAgent, algos, - notifServer, }) let server = createServer({ @@ -165,39 +114,6 @@ export class BskyAppView { } async start(): Promise { - const { db, backgroundQueue } = this.ctx - const primary = db.getPrimary() - const replicas = db.getReplicas() - this.dbStatsInterval = setInterval(() => { - dbLogger.info( - { - idleCount: replicas.reduce( - (tot, replica) => tot + replica.pool.idleCount, - 0, - ), - totalCount: replicas.reduce( - (tot, replica) => tot + replica.pool.totalCount, - 0, - ), - waitingCount: replicas.reduce( - (tot, replica) => tot + replica.pool.waitingCount, - 0, - ), - primaryIdleCount: primary.pool.idleCount, - primaryTotalCount: primary.pool.totalCount, - primaryWaitingCount: primary.pool.waitingCount, - }, - 'db pool stats', - ) - dbLogger.info( - { - runningCount: backgroundQueue.queue.pending, - waitingCount: backgroundQueue.queue.size, - }, - 'background queue stats', - ) - }, 10000) - this.ctx.labelCache.start() const server = this.app.listen(this.ctx.cfg.port) this.server = server server.keepAliveTimeout = 90000 @@ -208,13 +124,8 @@ export class BskyAppView { return server } - async destroy(opts?: { skipDb: boolean }): Promise { - this.ctx.labelCache.stop() - await this.ctx.didCache.destroy() + 
async destroy(): Promise { await this.terminator?.terminate() - await this.ctx.backgroundQueue.destroy() - if (!opts?.skipDb) await this.ctx.db.close() - clearInterval(this.dbStatsInterval) } } diff --git a/packages/bsky/src/label-cache.ts b/packages/bsky/src/label-cache.ts deleted file mode 100644 index b162a2d30bd..00000000000 --- a/packages/bsky/src/label-cache.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { wait } from '@atproto/common' -import { PrimaryDatabase } from './db' -import { Label } from './db/tables/label' -import { labelerLogger as log } from './logger' - -export class LabelCache { - bySubject: Record = {} - latestLabel = '' - refreshes = 0 - - destroyed = false - - constructor(public db: PrimaryDatabase) {} - - start() { - this.poll() - } - - async fullRefresh() { - const allLabels = await this.db.db.selectFrom('label').selectAll().execute() - this.wipeCache() - this.processLabels(allLabels) - } - - async partialRefresh() { - const labels = await this.db.db - .selectFrom('label') - .selectAll() - .where('cts', '>', this.latestLabel) - .execute() - this.processLabels(labels) - } - - async poll() { - try { - if (this.destroyed) return - if (this.refreshes >= 120) { - await this.fullRefresh() - this.refreshes = 0 - } else { - await this.partialRefresh() - this.refreshes++ - } - } catch (err) { - log.error( - { err, latestLabel: this.latestLabel, refreshes: this.refreshes }, - 'label cache failed to refresh', - ) - } - await wait(500) - this.poll() - } - - processLabels(labels: Label[]) { - for (const label of labels) { - if (label.cts > this.latestLabel) { - this.latestLabel = label.cts - } - this.bySubject[label.uri] ??= [] - this.bySubject[label.uri].push(label) - } - } - - wipeCache() { - this.bySubject = {} - } - - stop() { - this.destroyed = true - } - - forSubject(subject: string, includeNeg = false): Label[] { - const labels = this.bySubject[subject] ?? [] - return includeNeg ? 
labels : labels.filter((l) => l.neg === false) - } - - forSubjects(subjects: string[], includeNeg?: boolean): Label[] { - let labels: Label[] = [] - const alreadyAdded = new Set() - for (const subject of subjects) { - if (alreadyAdded.has(subject)) { - continue - } - const subLabels = this.forSubject(subject, includeNeg) - labels = [...labels, ...subLabels] - alreadyAdded.add(subject) - } - return labels - } -} diff --git a/packages/bsky/src/notifications.ts b/packages/bsky/src/notifications.ts deleted file mode 100644 index fdf24919d19..00000000000 --- a/packages/bsky/src/notifications.ts +++ /dev/null @@ -1,382 +0,0 @@ -import axios from 'axios' -import { Insertable, sql } from 'kysely' -import TTLCache from '@isaacs/ttlcache' -import { AtUri } from '@atproto/api' -import { MINUTE, chunkArray } from '@atproto/common' -import Database from './db/primary' -import { Notification } from './db/tables/notification' -import { NotificationPushToken as PushToken } from './db/tables/notification-push-token' -import logger from './indexer/logger' -import { notSoftDeletedClause, valuesList } from './db/util' -import { ids } from './lexicon/lexicons' -import { retryHttp } from './util/retry' - -export type Platform = 'ios' | 'android' | 'web' - -type PushNotification = { - tokens: string[] - platform: 1 | 2 // 1 = ios, 2 = android - title: string - message: string - topic: string - data?: { - [key: string]: string - } - collapse_id?: string - collapse_key?: string -} - -type InsertableNotif = Insertable - -type NotifDisplay = { - key: string - rateLimit: boolean - title: string - body: string - notif: InsertableNotif -} - -export class NotificationServer { - private rateLimiter = new RateLimiter(1, 30 * MINUTE) - - constructor(public db: Database, public pushEndpoint?: string) {} - - async getTokensByDid(dids: string[]) { - if (!dids.length) return {} - const tokens = await this.db.db - .selectFrom('notification_push_token') - .where('did', 'in', dids) - .selectAll() - 
.execute() - return tokens.reduce((acc, token) => { - acc[token.did] ??= [] - acc[token.did].push(token) - return acc - }, {} as Record) - } - - async prepareNotifsToSend(notifications: InsertableNotif[]) { - const now = Date.now() - const notifsToSend: PushNotification[] = [] - const tokensByDid = await this.getTokensByDid( - unique(notifications.map((n) => n.did)), - ) - // views for all notifications that have tokens - const notificationViews = await this.getNotificationDisplayAttributes( - notifications.filter((n) => tokensByDid[n.did]), - ) - - for (const notifView of notificationViews) { - if (!isRecent(notifView.notif.sortAt, 10 * MINUTE)) { - continue // if the notif is from > 10 minutes ago, don't send push notif - } - const { did: userDid } = notifView.notif - const userTokens = tokensByDid[userDid] ?? [] - for (const t of userTokens) { - const { appId, platform, token } = t - if (notifView.rateLimit && !this.rateLimiter.check(token, now)) { - continue - } - if (platform === 'ios' || platform === 'android') { - notifsToSend.push({ - tokens: [token], - platform: platform === 'ios' ? 1 : 2, - title: notifView.title, - message: notifView.body, - topic: appId, - data: { - reason: notifView.notif.reason, - recordUri: notifView.notif.recordUri, - recordCid: notifView.notif.recordCid, - }, - collapse_id: notifView.key, - collapse_key: notifView.key, - }) - } else { - // @TODO: Handle web notifs - logger.warn({ did: userDid }, 'cannot send web notification to user') - } - } - } - - return notifsToSend - } - - /** - * The function `addNotificationsToQueue` adds push notifications to a queue, taking into account rate - * limiting and batching the notifications for efficient processing. - * @param {PushNotification[]} notifs - An array of PushNotification objects. Each PushNotification - * object has a "tokens" property which is an array of tokens. 
- * @returns void - */ - async processNotifications(notifs: PushNotification[]) { - for (const batch of chunkArray(notifs, 20)) { - try { - await this.sendPushNotifications(batch) - } catch (err) { - logger.error({ err, batch }, 'notification push batch failed') - } - } - } - - /** 1. Get the user's token (APNS or FCM for iOS and Android respectively) from the database - User token will be in the format: - did || token || platform (1 = iOS, 2 = Android, 3 = Web) - 2. Send notification to `gorush` server with token - Notification will be in the format: - "notifications": [ - { - "tokens": string[], - "platform": 1 | 2, - "message": string, - "title": string, - "priority": "normal" | "high", - "image": string, (Android only) - "expiration": number, (iOS only) - "badge": number, (iOS only) - } - ] - 3. `gorush` will send notification to APNS or FCM - 4. store response from `gorush` which contains the ID of the notification - 5. If notification needs to be updated or deleted, find the ID of the notification from the database and send a new notification to `gorush` with the ID (repeat step 2) - */ - private async sendPushNotifications(notifications: PushNotification[]) { - // if pushEndpoint is not defined, we are not running in the indexer service, so we can't send push notifications - if (!this.pushEndpoint) { - throw new Error('Push endpoint not defined') - } - // if no notifications, skip and return early - if (notifications.length === 0) { - return - } - const pushEndpoint = this.pushEndpoint - await retryHttp(() => - axios.post( - pushEndpoint, - { notifications }, - { - headers: { - 'Content-Type': 'application/json', - accept: 'application/json', - }, - }, - ), - ) - } - - async registerDeviceForPushNotifications( - did: string, - token: string, - platform: Platform, - appId: string, - ) { - // if token doesn't exist, insert it, on conflict do nothing - await this.db.db - .insertInto('notification_push_token') - .values({ did, token, platform, appId }) - 
.onConflict((oc) => oc.doNothing()) - .execute() - } - - async getNotificationDisplayAttributes( - notifs: InsertableNotif[], - ): Promise { - const { ref } = this.db.db.dynamic - const authorDids = notifs.map((n) => n.author) - const subjectUris = notifs.flatMap((n) => n.reasonSubject ?? []) - const recordUris = notifs.map((n) => n.recordUri) - const allUris = [...subjectUris, ...recordUris] - - // gather potential display data for notifications in batch - const [authors, posts, blocksAndMutes] = await Promise.all([ - this.db.db - .selectFrom('actor') - .leftJoin('profile', 'profile.creator', 'actor.did') - .leftJoin('record', 'record.uri', 'profile.uri') - .where(notSoftDeletedClause(ref('actor'))) - .where(notSoftDeletedClause(ref('record'))) - .where('profile.creator', 'in', authorDids.length ? authorDids : ['']) - .select(['actor.did as did', 'handle', 'displayName']) - .execute(), - this.db.db - .selectFrom('post') - .innerJoin('actor', 'actor.did', 'post.creator') - .innerJoin('record', 'record.uri', 'post.uri') - .where(notSoftDeletedClause(ref('actor'))) - .where(notSoftDeletedClause(ref('record'))) - .where('post.uri', 'in', allUris.length ? allUris : ['']) - .select(['post.uri as uri', 'text']) - .execute(), - this.findBlocksAndMutes(notifs), - ]) - - const authorsByDid = authors.reduce((acc, author) => { - acc[author.did] = author - return acc - }, {} as Record) - const postsByUri = posts.reduce((acc, post) => { - acc[post.uri] = post - return acc - }, {} as Record) - - const results: NotifDisplay[] = [] - - for (const notif of notifs) { - const { - author: authorDid, - reason, - reasonSubject: subjectUri, // if like/reply/quote/mention, the post which was liked/replied to/mention is in/or quoted. if custom feed liked, the feed which was liked - recordUri, - } = notif - - const author = - authorsByDid[authorDid]?.displayName || authorsByDid[authorDid]?.handle - const postRecord = postsByUri[recordUri] - const postSubject = subjectUri ? 
postsByUri[subjectUri] : null - - // if blocked or muted, don't send notification - const shouldFilter = blocksAndMutes.some( - (pair) => pair.author === notif.author && pair.receiver === notif.did, - ) - if (shouldFilter || !author) { - // if no display name, dont send notification - continue - } - // const author = displayName.displayName - - // 2. Get post data content - // if follow, get the URI of the author's profile - // if reply, or mention, get URI of the postRecord - // if like, or custom feed like, or repost get the URI of the reasonSubject - const key = reason - let title = '' - let body = '' - let rateLimit = true - - // check follow first and mention first because they don't have subjectUri and return - // reply has subjectUri but the recordUri is the replied post - if (reason === 'follow') { - title = 'New follower!' - body = `${author} has followed you` - results.push({ key, title, body, notif, rateLimit }) - continue - } else if (reason === 'mention' || reason === 'reply') { - // use recordUri for mention and reply - title = - reason === 'mention' - ? `${author} mentioned you` - : `${author} replied to your post` - body = postRecord?.text || '' - rateLimit = false // always deliver - results.push({ key, title, body, notif, rateLimit }) - continue - } - - // if no subjectUri, don't send notification - // at this point, subjectUri should exist for all the other reasons - if (!postSubject) { - continue - } - - if (reason === 'like') { - title = `${author} liked your post` - body = postSubject?.text || '' - // custom feed like - const uri = subjectUri ? new AtUri(subjectUri) : null - if (uri?.collection === ids.AppBskyFeedGenerator) { - title = `${author} liked your custom feed` - body = uri?.rkey ?? 
'' - } - } else if (reason === 'quote') { - title = `${author} quoted your post` - body = postSubject?.text || '' - rateLimit = true // always deliver - } else if (reason === 'repost') { - title = `${author} reposted your post` - body = postSubject?.text || '' - } - - if (title === '' && body === '') { - logger.warn( - { notif }, - 'No notification display attributes found for this notification. Either profile or post data for this notification is missing.', - ) - continue - } - - results.push({ key, title, body, notif, rateLimit }) - } - - return results - } - - async findBlocksAndMutes(notifs: InsertableNotif[]) { - const pairs = notifs.map((n) => ({ author: n.author, receiver: n.did })) - const { ref } = this.db.db.dynamic - const blockQb = this.db.db - .selectFrom('actor_block') - .where((outer) => - outer - .where((qb) => - qb - .whereRef('actor_block.creator', '=', ref('author')) - .whereRef('actor_block.subjectDid', '=', ref('receiver')), - ) - .orWhere((qb) => - qb - .whereRef('actor_block.creator', '=', ref('receiver')) - .whereRef('actor_block.subjectDid', '=', ref('author')), - ), - ) - .select(['creator', 'subjectDid']) - const muteQb = this.db.db - .selectFrom('mute') - .whereRef('mute.subjectDid', '=', ref('author')) - .whereRef('mute.mutedByDid', '=', ref('receiver')) - .selectAll() - const muteListQb = this.db.db - .selectFrom('list_item') - .innerJoin('list_mute', 'list_mute.listUri', 'list_item.listUri') - .whereRef('list_mute.mutedByDid', '=', ref('receiver')) - .whereRef('list_item.subjectDid', '=', ref('author')) - .select('list_item.subjectDid') - - const values = valuesList(pairs.map((p) => sql`${p.author}, ${p.receiver}`)) - const filterPairs = await this.db.db - .selectFrom(values.as(sql`pair (author, receiver)`)) - .whereExists(muteQb) - .orWhereExists(muteListQb) - .orWhereExists(blockQb) - .selectAll() - .execute() - return filterPairs as { author: string; receiver: string }[] - } -} - -const isRecent = (isoTime: string, timeDiff: 
number): boolean => { - const diff = Date.now() - new Date(isoTime).getTime() - return diff < timeDiff -} - -const unique = (items: string[]) => [...new Set(items)] - -class RateLimiter { - private rateLimitCache = new TTLCache({ - max: 50000, - ttl: this.windowMs, - noUpdateTTL: true, - }) - constructor(private limit: number, private windowMs: number) {} - check(token: string, now = Date.now()) { - const key = getRateLimitKey(token, now) - const last = this.rateLimitCache.get(key) ?? 0 - const current = last + 1 - this.rateLimitCache.set(key, current) - return current <= this.limit - } -} - -const getRateLimitKey = (token: string, now: number) => { - const iteration = Math.floor(now / (20 * MINUTE)) - return `${iteration}:${token}` -} diff --git a/packages/bsky/src/services/actor/index.ts b/packages/bsky/src/services/actor/index.ts deleted file mode 100644 index 51be90892fc..00000000000 --- a/packages/bsky/src/services/actor/index.ts +++ /dev/null @@ -1,186 +0,0 @@ -import { sql } from 'kysely' -import { wait } from '@atproto/common' -import { Database } from '../../db' -import { notSoftDeletedClause } from '../../db/util' -import { ActorViews } from './views' -import { ImageUriBuilder } from '../../image/uri' -import { Actor } from '../../db/tables/actor' -import { LabelCache } from '../../label-cache' -import { TimeCidKeyset, paginate } from '../../db/pagination' -import { SearchKeyset, getUserSearchQuery } from '../util/search' - -export * from './types' - -export class ActorService { - constructor( - public db: Database, - public imgUriBuilder: ImageUriBuilder, - public labelCache: LabelCache, - ) {} - - static creator(imgUriBuilder: ImageUriBuilder, labelCache: LabelCache) { - return (db: Database) => new ActorService(db, imgUriBuilder, labelCache) - } - - views = new ActorViews(this.db, this.imgUriBuilder, this.labelCache) - - async getActorDid(handleOrDid: string): Promise { - if (handleOrDid.startsWith('did:')) { - return handleOrDid - } - const subject = 
await this.getActor(handleOrDid, true) - return subject?.did ?? null - } - - async getActor( - handleOrDid: string, - includeSoftDeleted = false, - ): Promise { - const actors = await this.getActors([handleOrDid], includeSoftDeleted) - return actors[0] || null - } - - async getActors( - handleOrDids: string[], - includeSoftDeleted = false, - ): Promise { - const { ref } = this.db.db.dynamic - const dids: string[] = [] - const handles: string[] = [] - const order: Record = {} - handleOrDids.forEach((item, i) => { - if (item.startsWith('did:')) { - order[item] = i - dids.push(item) - } else { - order[item.toLowerCase()] = i - handles.push(item.toLowerCase()) - } - }) - const results = await this.db.db - .selectFrom('actor') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('actor'))), - ) - .where((qb) => { - if (dids.length) { - qb = qb.orWhere('actor.did', 'in', dids) - } - if (handles.length) { - qb = qb.orWhere( - 'actor.handle', - 'in', - handles.length === 1 - ? [handles[0], handles[0]] // a silly (but worthwhile) optimization to avoid usage of actor_handle_tgrm_idx - : handles, - ) - } - return qb - }) - .selectAll() - .execute() - - return results.sort((a, b) => { - const orderA = order[a.did] ?? order[a.handle?.toLowerCase() ?? ''] - const orderB = order[b.did] ?? order[b.handle?.toLowerCase() ?? ''] - return orderA - orderB - }) - } - - async getSearchResults({ - cursor, - limit = 25, - query = '', - includeSoftDeleted, - }: { - cursor?: string - limit?: number - query?: string - includeSoftDeleted?: boolean - }) { - const searchField = query.startsWith('did:') ? 
'did' : 'handle' - let paginatedBuilder - const { ref } = this.db.db.dynamic - const paginationOptions = { - limit, - cursor, - direction: 'asc' as const, - } - let keyset - - if (query && searchField === 'handle') { - keyset = new SearchKeyset(sql``, sql``) - paginatedBuilder = getUserSearchQuery(this.db, { - query, - includeSoftDeleted, - ...paginationOptions, - }).select('distance') - } else { - paginatedBuilder = this.db.db - .selectFrom('actor') - .select([sql`0`.as('distance')]) - keyset = new ListKeyset(ref('indexedAt'), ref('did')) - - // When searchField === 'did', the query will always be a valid string because - // searchField is set to 'did' after checking that the query is a valid did - if (query && searchField === 'did') { - paginatedBuilder = paginatedBuilder.where('actor.did', '=', query) - } - paginatedBuilder = paginate(paginatedBuilder, { - keyset, - ...paginationOptions, - }) - } - - const results: Actor[] = await paginatedBuilder.selectAll('actor').execute() - return { results, cursor: keyset.packFromResult(results) } - } - - async getRepoRev(did: string | null): Promise { - if (did === null) return null - const res = await this.db.db - .selectFrom('actor_sync') - .select('repoRev') - .where('did', '=', did) - .executeTakeFirst() - return res?.repoRev ?? null - } - - async *all( - opts: { batchSize?: number; forever?: boolean; cooldownMs?: number } = {}, - ) { - const { cooldownMs = 1000, batchSize = 1000, forever = false } = opts - const baseQuery = this.db.db - .selectFrom('actor') - .selectAll() - .orderBy('did') - .limit(batchSize) - while (true) { - let cursor: ActorResult | undefined - do { - const actors = cursor - ? 
await baseQuery.where('did', '>', cursor.did).execute() - : await baseQuery.execute() - for (const actor of actors) { - yield actor - } - cursor = actors.at(-1) - } while (cursor) - if (forever) { - await wait(cooldownMs) - } else { - return - } - } - } -} - -type ActorResult = Actor -export class ListKeyset extends TimeCidKeyset<{ - indexedAt: string - did: string // handles are treated identically to cids in TimeCidKeyset -}> { - labelResult(result: { indexedAt: string; did: string }) { - return { primary: result.indexedAt, secondary: result.did } - } -} diff --git a/packages/bsky/src/services/actor/types.ts b/packages/bsky/src/services/actor/types.ts deleted file mode 100644 index d622e641099..00000000000 --- a/packages/bsky/src/services/actor/types.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { ListViewBasic } from '../../lexicon/types/app/bsky/graph/defs' -import { Label } from '../../lexicon/types/com/atproto/label/defs' -import { BlockAndMuteState } from '../graph' -import { ListInfoMap } from '../graph/types' -import { Labels } from '../label' - -export type ActorInfo = { - did: string - handle: string - displayName?: string - description?: string // omitted from basic profile view - avatar?: string - indexedAt?: string // omitted from basic profile view - viewer?: { - muted?: boolean - mutedByList?: ListViewBasic - blockedBy?: boolean - blocking?: string - blockingByList?: ListViewBasic - following?: string - followedBy?: string - } - labels?: Label[] -} -export type ActorInfoMap = { [did: string]: ActorInfo } - -export type ProfileViewMap = ActorInfoMap - -export type ProfileInfo = { - did: string - handle: string | null - profileUri: string | null - profileCid: string | null - displayName: string | null - description: string | null - avatarCid: string | null - indexedAt: string | null - profileJson: string | null - viewerFollowing: string | null - viewerFollowedBy: string | null -} - -export type ProfileInfoMap = { [did: string]: ProfileInfo } - -export 
type ProfileHydrationState = { - profiles: ProfileInfoMap - labels: Labels - lists: ListInfoMap - bam: BlockAndMuteState -} - -export type ProfileDetailInfo = ProfileInfo & { - bannerCid: string | null - followsCount: number | null - followersCount: number | null - postsCount: number | null -} - -export type ProfileDetailInfoMap = { [did: string]: ProfileDetailInfo } - -export type ProfileDetailHydrationState = { - profilesDetailed: ProfileDetailInfoMap - labels: Labels - lists: ListInfoMap - bam: BlockAndMuteState -} - -export const toMapByDid = ( - items: T[], -): Record => { - return items.reduce((cur, item) => { - cur[item.did] = item - return cur - }, {} as Record) -} diff --git a/packages/bsky/src/services/actor/views.ts b/packages/bsky/src/services/actor/views.ts deleted file mode 100644 index 5c40eac308b..00000000000 --- a/packages/bsky/src/services/actor/views.ts +++ /dev/null @@ -1,372 +0,0 @@ -import { mapDefined } from '@atproto/common' -import { INVALID_HANDLE } from '@atproto/syntax' -import { jsonStringToLex } from '@atproto/lexicon' -import { - ProfileViewDetailed, - ProfileView, -} from '../../lexicon/types/app/bsky/actor/defs' -import { Database } from '../../db' -import { noMatch, notSoftDeletedClause } from '../../db/util' -import { Actor } from '../../db/tables/actor' -import { ImageUriBuilder } from '../../image/uri' -import { LabelService, Labels, getSelfLabels } from '../label' -import { BlockAndMuteState, GraphService } from '../graph' -import { LabelCache } from '../../label-cache' -import { - ActorInfoMap, - ProfileDetailHydrationState, - ProfileHydrationState, - ProfileInfoMap, - ProfileViewMap, - toMapByDid, -} from './types' -import { ListInfoMap } from '../graph/types' - -export class ActorViews { - constructor( - private db: Database, - private imgUriBuilder: ImageUriBuilder, - private labelCache: LabelCache, - ) {} - - services = { - label: LabelService.creator(this.labelCache)(this.db), - graph: 
GraphService.creator(this.imgUriBuilder)(this.db), - } - - async profiles( - results: (ActorResult | string)[], // @TODO simplify down to just string[] - viewer: string | null, - opts?: { includeSoftDeleted?: boolean }, - ): Promise { - if (results.length === 0) return {} - const dids = results.map((res) => (typeof res === 'string' ? res : res.did)) - const hydrated = await this.profileHydration(dids, { - viewer, - ...opts, - }) - return this.profilePresentation(dids, hydrated, viewer) - } - - async profilesBasic( - results: (ActorResult | string)[], - viewer: string | null, - opts?: { omitLabels?: boolean; includeSoftDeleted?: boolean }, - ): Promise { - if (results.length === 0) return {} - const dids = results.map((res) => (typeof res === 'string' ? res : res.did)) - const hydrated = await this.profileHydration(dids, { - viewer, - includeSoftDeleted: opts?.includeSoftDeleted, - }) - return this.profileBasicPresentation(dids, hydrated, viewer, opts) - } - - async profilesList( - results: ActorResult[], - viewer: string | null, - opts?: { includeSoftDeleted?: boolean }, - ): Promise { - const profiles = await this.profiles(results, viewer, opts) - return mapDefined(results, (result) => profiles[result.did]) - } - - async profileDetailHydration( - dids: string[], - opts: { - viewer?: string | null - includeSoftDeleted?: boolean - }, - state?: { - bam: BlockAndMuteState - labels: Labels - }, - ): Promise { - const { viewer = null, includeSoftDeleted } = opts - const { ref } = this.db.db.dynamic - const profileInfosQb = this.db.db - .selectFrom('actor') - .where('actor.did', 'in', dids.length ? 
dids : ['']) - .leftJoin('profile', 'profile.creator', 'actor.did') - .leftJoin('profile_agg', 'profile_agg.did', 'actor.did') - .leftJoin('record', 'record.uri', 'profile.uri') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('actor'))), - ) - .select([ - 'actor.did as did', - 'actor.handle as handle', - 'profile.uri as profileUri', - 'profile.cid as profileCid', - 'profile.displayName as displayName', - 'profile.description as description', - 'profile.avatarCid as avatarCid', - 'profile.bannerCid as bannerCid', - 'profile.indexedAt as indexedAt', - 'profile_agg.followsCount as followsCount', - 'profile_agg.followersCount as followersCount', - 'profile_agg.postsCount as postsCount', - 'record.json as profileJson', - this.db.db - .selectFrom('follow') - .if(!viewer, (q) => q.where(noMatch)) - .where('creator', '=', viewer ?? '') - .whereRef('subjectDid', '=', ref('actor.did')) - .select('uri') - .as('viewerFollowing'), - this.db.db - .selectFrom('follow') - .if(!viewer, (q) => q.where(noMatch)) - .whereRef('creator', '=', ref('actor.did')) - .where('subjectDid', '=', viewer ?? '') - .select('uri') - .as('viewerFollowedBy'), - ]) - const [profiles, labels, bam] = await Promise.all([ - profileInfosQb.execute(), - this.services.label.getLabelsForSubjects(dids, state?.labels), - this.services.graph.getBlockAndMuteState( - viewer ? 
dids.map((did) => [viewer, did]) : [], - state?.bam, - ), - ]) - const listUris = mapDefined(profiles, ({ did }) => { - const muteList = viewer && bam.muteList([viewer, did]) - const blockList = viewer && bam.blockList([viewer, did]) - const lists: string[] = [] - if (muteList) lists.push(muteList) - if (blockList) lists.push(blockList) - return lists - }).flat() - const lists = await this.services.graph.getListViews(listUris, viewer) - return { profilesDetailed: toMapByDid(profiles), labels, bam, lists } - } - - profileDetailPresentation( - dids: string[], - state: ProfileDetailHydrationState, - opts: { - viewer?: string | null - }, - ): Record { - const { viewer } = opts - const { profilesDetailed, lists, labels, bam } = state - return dids.reduce((acc, did) => { - const prof = profilesDetailed[did] - if (!prof) return acc - const avatar = prof?.avatarCid - ? this.imgUriBuilder.getPresetUri('avatar', prof.did, prof.avatarCid) - : undefined - const banner = prof?.bannerCid - ? this.imgUriBuilder.getPresetUri('banner', prof.did, prof.bannerCid) - : undefined - const mutedByListUri = viewer && bam.muteList([viewer, did]) - const mutedByList = - mutedByListUri && lists[mutedByListUri] - ? this.services.graph.formatListViewBasic(lists[mutedByListUri]) - : undefined - const blockingByListUri = viewer && bam.blockList([viewer, did]) - const blockingByList = - blockingByListUri && lists[blockingByListUri] - ? this.services.graph.formatListViewBasic(lists[blockingByListUri]) - : undefined - const actorLabels = labels[did] ?? [] - const selfLabels = getSelfLabels({ - uri: prof.profileUri, - cid: prof.profileCid, - record: - prof.profileJson !== null - ? (jsonStringToLex(prof.profileJson) as Record) - : null, - }) - acc[did] = { - did: prof.did, - handle: prof.handle ?? INVALID_HANDLE, - displayName: prof?.displayName || undefined, - description: prof?.description || undefined, - avatar, - banner, - followsCount: prof?.followsCount ?? 
0, - followersCount: prof?.followersCount ?? 0, - postsCount: prof?.postsCount ?? 0, - indexedAt: prof?.indexedAt || undefined, - viewer: viewer - ? { - muted: bam.mute([viewer, did]), - mutedByList, - blockedBy: !!bam.blockedBy([viewer, did]), - blocking: bam.blocking([viewer, did]) ?? undefined, - blockingByList, - following: - prof?.viewerFollowing && !bam.block([viewer, did]) - ? prof.viewerFollowing - : undefined, - followedBy: - prof?.viewerFollowedBy && !bam.block([viewer, did]) - ? prof.viewerFollowedBy - : undefined, - } - : undefined, - labels: [...actorLabels, ...selfLabels], - } - return acc - }, {} as Record) - } - - async profileHydration( - dids: string[], - opts: { - viewer?: string | null - includeSoftDeleted?: boolean - }, - state?: { - bam: BlockAndMuteState - labels: Labels - }, - ): Promise { - const { viewer = null, includeSoftDeleted } = opts - const { ref } = this.db.db.dynamic - const profileInfosQb = this.db.db - .selectFrom('actor') - .where('actor.did', 'in', dids.length ? dids : ['']) - .leftJoin('profile', 'profile.creator', 'actor.did') - .leftJoin('record', 'record.uri', 'profile.uri') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('actor'))), - ) - .select([ - 'actor.did as did', - 'actor.handle as handle', - 'profile.uri as profileUri', - 'profile.cid as profileCid', - 'profile.displayName as displayName', - 'profile.description as description', - 'profile.avatarCid as avatarCid', - 'profile.indexedAt as indexedAt', - 'record.json as profileJson', - this.db.db - .selectFrom('follow') - .if(!viewer, (q) => q.where(noMatch)) - .where('creator', '=', viewer ?? '') - .whereRef('subjectDid', '=', ref('actor.did')) - .select('uri') - .as('viewerFollowing'), - this.db.db - .selectFrom('follow') - .if(!viewer, (q) => q.where(noMatch)) - .whereRef('creator', '=', ref('actor.did')) - .where('subjectDid', '=', viewer ?? 
'') - .select('uri') - .as('viewerFollowedBy'), - ]) - const [profiles, labels, bam] = await Promise.all([ - profileInfosQb.execute(), - this.services.label.getLabelsForSubjects(dids, state?.labels), - this.services.graph.getBlockAndMuteState( - viewer ? dids.map((did) => [viewer, did]) : [], - state?.bam, - ), - ]) - const listUris = mapDefined(profiles, ({ did }) => { - const muteList = viewer && bam.muteList([viewer, did]) - const blockList = viewer && bam.blockList([viewer, did]) - const lists: string[] = [] - if (muteList) lists.push(muteList) - if (blockList) lists.push(blockList) - return lists - }).flat() - const lists = await this.services.graph.getListViews(listUris, viewer) - return { profiles: toMapByDid(profiles), labels, bam, lists } - } - - profilePresentation( - dids: string[], - state: { - profiles: ProfileInfoMap - lists: ListInfoMap - labels: Labels - bam: BlockAndMuteState - }, - viewer: string | null, - ): ProfileViewMap { - const { profiles, lists, labels, bam } = state - return dids.reduce((acc, did) => { - const prof = profiles[did] - if (!prof) return acc - const avatar = prof?.avatarCid - ? this.imgUriBuilder.getPresetUri('avatar', prof.did, prof.avatarCid) - : undefined - const mutedByListUri = viewer && bam.muteList([viewer, did]) - const mutedByList = - mutedByListUri && lists[mutedByListUri] - ? this.services.graph.formatListViewBasic(lists[mutedByListUri]) - : undefined - const blockingByListUri = viewer && bam.blockList([viewer, did]) - const blockingByList = - blockingByListUri && lists[blockingByListUri] - ? this.services.graph.formatListViewBasic(lists[blockingByListUri]) - : undefined - const actorLabels = labels[did] ?? [] - const selfLabels = getSelfLabels({ - uri: prof.profileUri, - cid: prof.profileCid, - record: - prof.profileJson !== null - ? (jsonStringToLex(prof.profileJson) as Record) - : null, - }) - acc[did] = { - did: prof.did, - handle: prof.handle ?? 
INVALID_HANDLE, - displayName: prof?.displayName || undefined, - description: prof?.description || undefined, - avatar, - indexedAt: prof?.indexedAt || undefined, - viewer: viewer - ? { - muted: bam.mute([viewer, did]), - mutedByList, - blockedBy: !!bam.blockedBy([viewer, did]), - blocking: bam.blocking([viewer, did]) ?? undefined, - blockingByList, - following: - prof?.viewerFollowing && !bam.block([viewer, did]) - ? prof.viewerFollowing - : undefined, - followedBy: - prof?.viewerFollowedBy && !bam.block([viewer, did]) - ? prof.viewerFollowedBy - : undefined, - } - : undefined, - labels: [...actorLabels, ...selfLabels], - } - return acc - }, {} as ProfileViewMap) - } - - profileBasicPresentation( - dids: string[], - state: ProfileHydrationState, - viewer: string | null, - opts?: { - omitLabels?: boolean - }, - ): ProfileViewMap { - const result = this.profilePresentation(dids, state, viewer) - return Object.values(result).reduce((acc, prof) => { - const profileBasic = { - did: prof.did, - handle: prof.handle, - displayName: prof.displayName, - avatar: prof.avatar, - viewer: prof.viewer, - labels: opts?.omitLabels ? 
undefined : prof.labels, - } - acc[prof.did] = profileBasic - return acc - }, {} as ProfileViewMap) - } -} - -type ActorResult = Actor diff --git a/packages/bsky/src/services/feed/index.ts b/packages/bsky/src/services/feed/index.ts deleted file mode 100644 index 2323e6a74be..00000000000 --- a/packages/bsky/src/services/feed/index.ts +++ /dev/null @@ -1,547 +0,0 @@ -import { sql } from 'kysely' -import { AtUri } from '@atproto/syntax' -import { jsonStringToLex } from '@atproto/lexicon' -import { Database } from '../../db' -import { countAll, noMatch, notSoftDeletedClause } from '../../db/util' -import { ImageUriBuilder } from '../../image/uri' -import { ids } from '../../lexicon/lexicons' -import { - Record as PostRecord, - isRecord as isPostRecord, -} from '../../lexicon/types/app/bsky/feed/post' -import { - Record as ThreadgateRecord, - isListRule, -} from '../../lexicon/types/app/bsky/feed/threadgate' -import { isMain as isEmbedImages } from '../../lexicon/types/app/bsky/embed/images' -import { isMain as isEmbedExternal } from '../../lexicon/types/app/bsky/embed/external' -import { - isMain as isEmbedRecord, - isViewRecord, -} from '../../lexicon/types/app/bsky/embed/record' -import { isMain as isEmbedRecordWithMedia } from '../../lexicon/types/app/bsky/embed/recordWithMedia' -import { - PostInfoMap, - FeedItemType, - FeedRow, - FeedGenInfoMap, - PostEmbedViews, - RecordEmbedViewRecordMap, - PostInfo, - RecordEmbedViewRecord, - PostBlocksMap, - FeedHydrationState, - ThreadgateInfoMap, -} from './types' -import { LabelService } from '../label' -import { ActorService } from '../actor' -import { - BlockAndMuteState, - GraphService, - ListInfoMap, - RelationshipPair, -} from '../graph' -import { FeedViews } from './views' -import { LabelCache } from '../../label-cache' -import { threadgateToPostUri, postToThreadgateUri } from './util' -import { mapDefined } from '@atproto/common' - -export * from './types' - -export class FeedService { - constructor( - public db: 
Database, - public imgUriBuilder: ImageUriBuilder, - public labelCache: LabelCache, - ) {} - - views = new FeedViews(this.db, this.imgUriBuilder, this.labelCache) - - services = { - label: LabelService.creator(this.labelCache)(this.db), - actor: ActorService.creator(this.imgUriBuilder, this.labelCache)(this.db), - graph: GraphService.creator(this.imgUriBuilder)(this.db), - } - - static creator(imgUriBuilder: ImageUriBuilder, labelCache: LabelCache) { - return (db: Database) => new FeedService(db, imgUriBuilder, labelCache) - } - - selectPostQb() { - return this.db.db - .selectFrom('post') - .select([ - sql`${'post'}`.as('type'), - 'post.uri as uri', - 'post.cid as cid', - 'post.uri as postUri', - 'post.creator as originatorDid', - 'post.creator as postAuthorDid', - 'post.replyParent as replyParent', - 'post.replyRoot as replyRoot', - 'post.sortAt as sortAt', - ]) - } - - selectFeedItemQb() { - return this.db.db - .selectFrom('feed_item') - .innerJoin('post', 'post.uri', 'feed_item.postUri') - .selectAll('feed_item') - .select([ - 'post.replyRoot', - 'post.replyParent', - 'post.creator as postAuthorDid', - ]) - } - - selectFeedGeneratorQb(viewer?: string | null) { - const { ref } = this.db.db.dynamic - return this.db.db - .selectFrom('feed_generator') - .innerJoin('actor', 'actor.did', 'feed_generator.creator') - .innerJoin('record', 'record.uri', 'feed_generator.uri') - .selectAll('feed_generator') - .where(notSoftDeletedClause(ref('actor'))) - .where(notSoftDeletedClause(ref('record'))) - .select((qb) => - qb - .selectFrom('like') - .whereRef('like.subject', '=', 'feed_generator.uri') - .select(countAll.as('count')) - .as('likeCount'), - ) - .select((qb) => - qb - .selectFrom('like') - .if(!viewer, (q) => q.where(noMatch)) - .where('like.creator', '=', viewer ?? 
'') - .whereRef('like.subject', '=', 'feed_generator.uri') - .select('uri') - .as('viewerLike'), - ) - } - - async getPostInfos( - postUris: string[], - viewer: string | null, - ): Promise { - if (postUris.length < 1) return {} - const db = this.db.db - const { ref } = db.dynamic - const posts = await db - .selectFrom('post') - .where('post.uri', 'in', postUris) - .innerJoin('actor', 'actor.did', 'post.creator') - .innerJoin('record', 'record.uri', 'post.uri') - .leftJoin('post_agg', 'post_agg.uri', 'post.uri') - .where(notSoftDeletedClause(ref('actor'))) // Ensures post reply parent/roots get omitted from views when taken down - .where(notSoftDeletedClause(ref('record'))) - .select([ - 'post.uri as uri', - 'post.cid as cid', - 'post.creator as creator', - 'post.sortAt as indexedAt', - 'post.invalidReplyRoot as invalidReplyRoot', - 'post.violatesThreadGate as violatesThreadGate', - 'record.json as recordJson', - 'post_agg.likeCount as likeCount', - 'post_agg.repostCount as repostCount', - 'post_agg.replyCount as replyCount', - 'post.tags as tags', - db - .selectFrom('repost') - .if(!viewer, (q) => q.where(noMatch)) - .where('creator', '=', viewer ?? '') - .whereRef('subject', '=', ref('post.uri')) - .select('uri') - .as('requesterRepost'), - db - .selectFrom('like') - .if(!viewer, (q) => q.where(noMatch)) - .where('creator', '=', viewer ?? '') - .whereRef('subject', '=', ref('post.uri')) - .select('uri') - .as('requesterLike'), - ]) - .execute() - return posts.reduce((acc, cur) => { - const { recordJson, ...post } = cur - const record = jsonStringToLex(recordJson) as PostRecord - const info: PostInfo = { - ...post, - invalidReplyRoot: post.invalidReplyRoot ?? false, - violatesThreadGate: post.violatesThreadGate ?? 
false, - record, - viewer, - } - return Object.assign(acc, { [post.uri]: info }) - }, {} as PostInfoMap) - } - - async getFeedGeneratorInfos(generatorUris: string[], viewer: string | null) { - if (generatorUris.length < 1) return {} - const feedGens = await this.selectFeedGeneratorQb(viewer) - .where('feed_generator.uri', 'in', generatorUris) - .execute() - return feedGens.reduce( - (acc, cur) => ({ - ...acc, - [cur.uri]: { - ...cur, - viewer: viewer ? { like: cur.viewerLike } : undefined, - }, - }), - {} as FeedGenInfoMap, - ) - } - - async getFeedItems(uris: string[]): Promise> { - if (uris.length < 1) return {} - const feedItems = await this.selectFeedItemQb() - .where('feed_item.uri', 'in', uris) - .execute() - return feedItems.reduce((acc, item) => { - return Object.assign(acc, { [item.uri]: item }) - }, {} as Record) - } - - async postUrisToFeedItems(uris: string[]): Promise { - const feedItems = await this.getFeedItems(uris) - return mapDefined(uris, (uri) => feedItems[uri]) - } - - feedItemRefs(items: FeedRow[]) { - const actorDids = new Set() - const postUris = new Set() - for (const item of items) { - postUris.add(item.postUri) - actorDids.add(item.postAuthorDid) - actorDids.add(item.originatorDid) - if (item.replyParent) { - postUris.add(item.replyParent) - actorDids.add(new AtUri(item.replyParent).hostname) - } - if (item.replyRoot) { - postUris.add(item.replyRoot) - actorDids.add(new AtUri(item.replyRoot).hostname) - } - } - return { dids: actorDids, uris: postUris } - } - - async feedHydration( - refs: { - dids: Set - uris: Set - viewer: string | null - }, - depth = 0, - ): Promise { - const { viewer, dids, uris } = refs - const [posts, threadgates, labels, bam] = await Promise.all([ - this.getPostInfos(Array.from(uris), viewer), - this.threadgatesByPostUri(Array.from(uris)), - this.services.label.getLabelsForSubjects([...uris, ...dids]), - this.services.graph.getBlockAndMuteState( - viewer ? 
[...dids].map((did) => [viewer, did]) : [], - ), - ]) - - // profileState for labels and bam handled above, profileHydration() shouldn't fetch additional - const [profileState, blocks, lists] = await Promise.all([ - this.services.actor.views.profileHydration( - Array.from(dids), - { viewer }, - { bam, labels }, - ), - this.blocksForPosts(posts, bam), - this.listsForThreadgates(threadgates, viewer), - ]) - const embeds = await this.embedsForPosts(posts, blocks, viewer, depth) - return { - posts, - threadgates, - blocks, - embeds, - labels, // includes info for profiles - bam, // includes info for profiles - profiles: profileState.profiles, - lists: Object.assign(lists, profileState.lists), - } - } - - // applies blocks for visibility to third-parties (i.e. based on post content) - async blocksForPosts( - posts: PostInfoMap, - bam?: BlockAndMuteState, - ): Promise { - const relationships: RelationshipPair[] = [] - const byPost: Record = {} - const didFromUri = (uri) => new AtUri(uri).host - for (const post of Object.values(posts)) { - // skip posts that we can't process or appear to already have been processed - if (!isPostRecord(post.record)) continue - if (byPost[post.uri]) continue - byPost[post.uri] = {} - // 3p block for replies - const parentUri = post.record.reply?.parent.uri - const parentDid = parentUri ? 
didFromUri(parentUri) : null - // 3p block for record embeds - const embedUris = nestedRecordUris([post.record]) - // gather actor relationships among posts - if (parentDid) { - const pair: RelationshipPair = [post.creator, parentDid] - relationships.push(pair) - byPost[post.uri].reply = pair - } - for (const embedUri of embedUris) { - const pair: RelationshipPair = [post.creator, didFromUri(embedUri)] - relationships.push(pair) - byPost[post.uri].embed = pair - } - } - // compute block state from all actor relationships among posts - const blockState = await this.services.graph.getBlockState( - relationships, - bam, - ) - const result: PostBlocksMap = {} - Object.entries(byPost).forEach(([uri, block]) => { - if (block.embed && blockState.block(block.embed)) { - result[uri] ??= {} - result[uri].embed = true - } - if (block.reply && blockState.block(block.reply)) { - result[uri] ??= {} - result[uri].reply = true - } - }) - return result - } - - async embedsForPosts( - postInfos: PostInfoMap, - blocks: PostBlocksMap, - viewer: string | null, - depth: number, - ) { - const postMap = postRecordsFromInfos(postInfos) - const posts = Object.values(postMap) - if (posts.length < 1) { - return {} - } - const recordEmbedViews = - depth > 1 ? 
{} : await this.nestedRecordViews(posts, viewer, depth) - - const postEmbedViews: PostEmbedViews = {} - for (const [uri, post] of Object.entries(postMap)) { - const creator = new AtUri(uri).hostname - if (!post.embed) continue - if (isEmbedImages(post.embed)) { - postEmbedViews[uri] = this.views.imagesEmbedView(creator, post.embed) - } else if (isEmbedExternal(post.embed)) { - postEmbedViews[uri] = this.views.externalEmbedView(creator, post.embed) - } else if (isEmbedRecord(post.embed)) { - if (!recordEmbedViews[post.embed.record.uri]) continue - postEmbedViews[uri] = { - $type: 'app.bsky.embed.record#view', - record: applyEmbedBlock( - uri, - blocks, - recordEmbedViews[post.embed.record.uri], - ), - } - } else if (isEmbedRecordWithMedia(post.embed)) { - const embedRecordView = recordEmbedViews[post.embed.record.record.uri] - if (!embedRecordView) continue - const formatted = this.views.getRecordWithMediaEmbedView( - creator, - post.embed, - applyEmbedBlock(uri, blocks, embedRecordView), - ) - if (formatted) { - postEmbedViews[uri] = formatted - } - } - } - return postEmbedViews - } - - async nestedRecordViews( - posts: PostRecord[], - viewer: string | null, - depth: number, - ): Promise { - const nestedUris = nestedRecordUris(posts) - if (nestedUris.length < 1) return {} - const nestedDids = new Set() - const nestedPostUris = new Set() - const nestedFeedGenUris = new Set() - const nestedListUris = new Set() - for (const uri of nestedUris) { - const parsed = new AtUri(uri) - nestedDids.add(parsed.hostname) - if (parsed.collection === ids.AppBskyFeedPost) { - nestedPostUris.add(uri) - } else if (parsed.collection === ids.AppBskyFeedGenerator) { - nestedFeedGenUris.add(uri) - } else if (parsed.collection === ids.AppBskyGraphList) { - nestedListUris.add(uri) - } - } - const [feedState, feedGenInfos, listViews] = await Promise.all([ - this.feedHydration( - { - dids: nestedDids, - uris: nestedPostUris, - viewer, - }, - depth + 1, - ), - 
this.getFeedGeneratorInfos([...nestedFeedGenUris], viewer), - this.services.graph.getListViews([...nestedListUris], viewer), - ]) - const actorInfos = this.services.actor.views.profileBasicPresentation( - [...nestedDids], - feedState, - viewer, - ) - const recordEmbedViews: RecordEmbedViewRecordMap = {} - for (const uri of nestedUris) { - const collection = new AtUri(uri).collection - if (collection === ids.AppBskyFeedGenerator && feedGenInfos[uri]) { - const genView = this.views.formatFeedGeneratorView( - feedGenInfos[uri], - actorInfos, - ) - if (genView) { - recordEmbedViews[uri] = { - $type: 'app.bsky.feed.defs#generatorView', - ...genView, - } - } - } else if (collection === ids.AppBskyGraphList && listViews[uri]) { - const listView = this.services.graph.formatListView( - listViews[uri], - actorInfos, - ) - if (listView) { - recordEmbedViews[uri] = { - $type: 'app.bsky.graph.defs#listView', - ...listView, - } - } - } else if (collection === ids.AppBskyFeedPost && feedState.posts[uri]) { - const formatted = this.views.formatPostView( - uri, - actorInfos, - feedState.posts, - feedState.threadgates, - feedState.embeds, - feedState.labels, - feedState.lists, - viewer, - ) - recordEmbedViews[uri] = this.views.getRecordEmbedView( - uri, - formatted, - depth > 0, - ) - } else { - recordEmbedViews[uri] = { - $type: 'app.bsky.embed.record#viewNotFound', - uri, - notFound: true, - } - } - } - return recordEmbedViews - } - - async threadgatesByPostUri(postUris: string[]): Promise { - const gates = postUris.length - ? 
await this.db.db - .selectFrom('record') - .where('uri', 'in', postUris.map(postToThreadgateUri)) - .select(['uri', 'cid', 'json']) - .execute() - : [] - const gatesByPostUri = gates.reduce((acc, gate) => { - const record = jsonStringToLex(gate.json) as ThreadgateRecord - const postUri = threadgateToPostUri(gate.uri) - if (record.post !== postUri) return acc // invalid, skip - acc[postUri] = { uri: gate.uri, cid: gate.cid, record } - return acc - }, {} as ThreadgateInfoMap) - return gatesByPostUri - } - - listsForThreadgates( - threadgates: ThreadgateInfoMap, - viewer: string | null, - ): Promise { - const listsUris = new Set() - Object.values(threadgates).forEach((gate) => { - gate?.record.allow?.forEach((rule) => { - if (isListRule(rule)) { - listsUris.add(rule.list) - } - }) - }) - return this.services.graph.getListViews([...listsUris], viewer) - } -} - -const postRecordsFromInfos = ( - infos: PostInfoMap, -): { [uri: string]: PostRecord } => { - const records: { [uri: string]: PostRecord } = {} - for (const [uri, info] of Object.entries(infos)) { - if (isPostRecord(info.record)) { - records[uri] = info.record - } - } - return records -} - -const nestedRecordUris = (posts: PostRecord[]): string[] => { - const uris: string[] = [] - for (const post of posts) { - if (!post.embed) continue - if (isEmbedRecord(post.embed)) { - uris.push(post.embed.record.uri) - } else if (isEmbedRecordWithMedia(post.embed)) { - uris.push(post.embed.record.record.uri) - } else { - continue - } - } - return uris -} - -type PostRelationships = { reply?: RelationshipPair; embed?: RelationshipPair } - -function applyEmbedBlock( - uri: string, - blocks: PostBlocksMap, - view: RecordEmbedViewRecord, -): RecordEmbedViewRecord { - if (isViewRecord(view) && blocks[uri]?.embed) { - return { - $type: 'app.bsky.embed.record#viewBlocked', - uri: view.uri, - blocked: true, - author: { - did: view.author.did, - viewer: view.author.viewer - ? 
{ - blockedBy: view.author.viewer?.blockedBy, - blocking: view.author.viewer?.blocking, - } - : undefined, - }, - } - } - return view -} diff --git a/packages/bsky/src/services/feed/types.ts b/packages/bsky/src/services/feed/types.ts deleted file mode 100644 index 8d4bd67f6bb..00000000000 --- a/packages/bsky/src/services/feed/types.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { Selectable } from 'kysely' -import { Record as ThreadgateRecord } from '../../lexicon/types/app/bsky/feed/threadgate' -import { View as ImagesEmbedView } from '../../lexicon/types/app/bsky/embed/images' -import { View as ExternalEmbedView } from '../../lexicon/types/app/bsky/embed/external' -import { - ViewBlocked, - ViewNotFound, - ViewRecord, - View as RecordEmbedView, -} from '../../lexicon/types/app/bsky/embed/record' -import { View as RecordWithMediaEmbedView } from '../../lexicon/types/app/bsky/embed/recordWithMedia' -import { - BlockedPost, - GeneratorView, - NotFoundPost, - PostView, -} from '../../lexicon/types/app/bsky/feed/defs' -import { FeedGenerator } from '../../db/tables/feed-generator' -import { ListView } from '../../lexicon/types/app/bsky/graph/defs' -import { ProfileHydrationState } from '../actor' -import { Labels } from '../label' -import { BlockAndMuteState } from '../graph' - -export type PostEmbedViews = { - [uri: string]: PostEmbedView -} - -export type PostEmbedView = - | ImagesEmbedView - | ExternalEmbedView - | RecordEmbedView - | RecordWithMediaEmbedView - -export type PostInfo = { - uri: string - cid: string - creator: string - record: Record - indexedAt: string - likeCount: number | null - repostCount: number | null - replyCount: number | null - requesterRepost: string | null - requesterLike: string | null - invalidReplyRoot: boolean - violatesThreadGate: boolean - viewer: string | null -} - -export type PostInfoMap = { [uri: string]: PostInfo } - -export type PostBlocksMap = { - [uri: string]: { reply?: boolean; embed?: boolean } -} - -export type 
ThreadgateInfo = { - uri: string - cid: string - record: ThreadgateRecord -} - -export type ThreadgateInfoMap = { - [postUri: string]: ThreadgateInfo -} - -export type FeedGenInfo = Selectable & { - likeCount: number - viewer?: { - like?: string - } -} - -export type FeedGenInfoMap = { [uri: string]: FeedGenInfo } - -export type FeedItemType = 'post' | 'repost' - -export type FeedRow = { - type: FeedItemType - uri: string - cid: string - postUri: string - postAuthorDid: string - originatorDid: string - replyParent: string | null - replyRoot: string | null - sortAt: string -} - -export type MaybePostView = PostView | NotFoundPost | BlockedPost - -export type RecordEmbedViewRecord = - | ViewRecord - | ViewNotFound - | ViewBlocked - | GeneratorView - | ListView - -export type RecordEmbedViewRecordMap = { [uri: string]: RecordEmbedViewRecord } - -export type FeedHydrationState = ProfileHydrationState & { - posts: PostInfoMap - threadgates: ThreadgateInfoMap - embeds: PostEmbedViews - labels: Labels - blocks: PostBlocksMap - bam: BlockAndMuteState -} diff --git a/packages/bsky/src/services/feed/views.ts b/packages/bsky/src/services/feed/views.ts deleted file mode 100644 index a848c88caa0..00000000000 --- a/packages/bsky/src/services/feed/views.ts +++ /dev/null @@ -1,460 +0,0 @@ -import { mapDefined } from '@atproto/common' -import { Database } from '../../db' -import { - FeedViewPost, - GeneratorView, - PostView, -} from '../../lexicon/types/app/bsky/feed/defs' -import { - Main as EmbedImages, - isMain as isEmbedImages, - View as EmbedImagesView, -} from '../../lexicon/types/app/bsky/embed/images' -import { - Main as EmbedExternal, - isMain as isEmbedExternal, - View as EmbedExternalView, -} from '../../lexicon/types/app/bsky/embed/external' -import { Main as EmbedRecordWithMedia } from '../../lexicon/types/app/bsky/embed/recordWithMedia' -import { - ViewBlocked, - ViewNotFound, - ViewRecord, -} from '../../lexicon/types/app/bsky/embed/record' -import { Record as 
PostRecord } from '../../lexicon/types/app/bsky/feed/post' -import { isListRule } from '../../lexicon/types/app/bsky/feed/threadgate' -import { - PostEmbedViews, - FeedGenInfo, - FeedRow, - MaybePostView, - PostInfoMap, - RecordEmbedViewRecord, - PostBlocksMap, - FeedHydrationState, - ThreadgateInfoMap, - ThreadgateInfo, -} from './types' -import { Labels, getSelfLabels } from '../label' -import { ImageUriBuilder } from '../../image/uri' -import { LabelCache } from '../../label-cache' -import { ActorInfoMap, ActorService } from '../actor' -import { ListInfoMap, GraphService } from '../graph' -import { AtUri } from '@atproto/syntax' -import { parseThreadGate } from './util' - -export class FeedViews { - constructor( - public db: Database, - public imgUriBuilder: ImageUriBuilder, - public labelCache: LabelCache, - ) {} - - static creator(imgUriBuilder: ImageUriBuilder, labelCache: LabelCache) { - return (db: Database) => new FeedViews(db, imgUriBuilder, labelCache) - } - - services = { - actor: ActorService.creator(this.imgUriBuilder, this.labelCache)(this.db), - graph: GraphService.creator(this.imgUriBuilder)(this.db), - } - - formatFeedGeneratorView( - info: FeedGenInfo, - profiles: ActorInfoMap, - ): GeneratorView | undefined { - const profile = profiles[info.creator] - if (!profile) { - return undefined - } - return { - uri: info.uri, - cid: info.cid, - did: info.feedDid, - creator: profile, - displayName: info.displayName ?? undefined, - description: info.description ?? undefined, - descriptionFacets: info.descriptionFacets - ? JSON.parse(info.descriptionFacets) - : undefined, - avatar: info.avatarCid - ? this.imgUriBuilder.getPresetUri( - 'avatar', - info.creator, - info.avatarCid, - ) - : undefined, - likeCount: info.likeCount, - viewer: info.viewer - ? { - like: info.viewer.like ?? 
undefined, - } - : undefined, - indexedAt: info.indexedAt, - } - } - - formatFeed( - items: FeedRow[], - state: FeedHydrationState, - viewer: string | null, - opts?: { - usePostViewUnion?: boolean - }, - ): FeedViewPost[] { - const { posts, threadgates, profiles, blocks, embeds, labels, lists } = - state - const actors = this.services.actor.views.profileBasicPresentation( - Object.keys(profiles), - state, - viewer, - ) - const feed: FeedViewPost[] = [] - for (const item of items) { - const info = posts[item.postUri] - const post = this.formatPostView( - item.postUri, - actors, - posts, - threadgates, - embeds, - labels, - lists, - viewer, - ) - // skip over not found & blocked posts - if (!post || blocks[post.uri]?.reply) { - continue - } - const feedPost = { post } - if (item.type === 'repost') { - const originator = actors[item.originatorDid] - // skip over reposts where we don't have reposter profile - if (!originator) { - continue - } else { - feedPost['reason'] = { - $type: 'app.bsky.feed.defs#reasonRepost', - by: originator, - indexedAt: item.sortAt, - } - } - } - // posts that violate reply-gating may appear in feeds, but without any thread context - if ( - item.replyParent && - item.replyRoot && - !info?.invalidReplyRoot && - !info?.violatesThreadGate - ) { - const replyParent = this.formatMaybePostView( - item.replyParent, - actors, - posts, - threadgates, - embeds, - labels, - lists, - blocks, - viewer, - opts, - ) - const replyRoot = this.formatMaybePostView( - item.replyRoot, - actors, - posts, - threadgates, - embeds, - labels, - lists, - blocks, - viewer, - opts, - ) - if (replyRoot && replyParent) { - feedPost['reply'] = { - root: replyRoot, - parent: replyParent, - } - } - } - feed.push(feedPost) - } - return feed - } - - formatPostView( - uri: string, - actors: ActorInfoMap, - posts: PostInfoMap, - threadgates: ThreadgateInfoMap, - embeds: PostEmbedViews, - labels: Labels, - lists: ListInfoMap, - viewer: string | null, - ): PostView | undefined { - 
const post = posts[uri] - const gate = threadgates[uri] - const author = actors[post?.creator] - if (!post || !author) return undefined - const postLabels = labels[uri] ?? [] - const postSelfLabels = getSelfLabels({ - uri: post.uri, - cid: post.cid, - record: post.record, - }) - return { - uri: post.uri, - cid: post.cid, - author: author, - record: post.record, - embed: embeds[uri], - replyCount: post.replyCount ?? 0, - repostCount: post.repostCount ?? 0, - likeCount: post.likeCount ?? 0, - indexedAt: post.indexedAt, - viewer: post.viewer - ? { - repost: post.requesterRepost ?? undefined, - like: post.requesterLike ?? undefined, - replyDisabled: this.userReplyDisabled( - uri, - actors, - posts, - threadgates, - lists, - viewer, - ), - } - : undefined, - labels: [...postLabels, ...postSelfLabels], - threadgate: - !post.record.reply && gate - ? this.formatThreadgate(gate, lists) - : undefined, - } - } - - userReplyDisabled( - uri: string, - actors: ActorInfoMap, - posts: PostInfoMap, - threadgates: ThreadgateInfoMap, - lists: ListInfoMap, - viewer: string | null, - ): boolean | undefined { - if (viewer === null) { - return undefined - } else if (posts[uri]?.violatesThreadGate) { - return true - } - - const rootUriStr: string = - posts[uri]?.record?.['reply']?.['root']?.['uri'] ?? uri - const gate = threadgates[rootUriStr]?.record - if (!gate) { - return undefined - } - const rootPost = posts[rootUriStr]?.record as PostRecord | undefined - const ownerDid = new AtUri(rootUriStr).hostname - - const { - canReply, - allowFollowing, - allowListUris = [], - } = parseThreadGate(viewer, ownerDid, rootPost ?? null, gate ?? 
null) - - if (canReply) { - return false - } - if (allowFollowing && actors[ownerDid]?.viewer?.followedBy) { - return false - } - for (const listUri of allowListUris) { - const list = lists[listUri] - if (list?.viewerInList) { - return false - } - } - return true - } - - formatMaybePostView( - uri: string, - actors: ActorInfoMap, - posts: PostInfoMap, - threadgates: ThreadgateInfoMap, - embeds: PostEmbedViews, - labels: Labels, - lists: ListInfoMap, - blocks: PostBlocksMap, - viewer: string | null, - opts?: { - usePostViewUnion?: boolean - }, - ): MaybePostView | undefined { - const post = this.formatPostView( - uri, - actors, - posts, - threadgates, - embeds, - labels, - lists, - viewer, - ) - if (!post) { - if (!opts?.usePostViewUnion) return - return this.notFoundPost(uri) - } - if ( - post.author.viewer?.blockedBy || - post.author.viewer?.blocking || - blocks[uri]?.reply - ) { - if (!opts?.usePostViewUnion) return - return this.blockedPost(post) - } - return { - $type: 'app.bsky.feed.defs#postView', - ...post, - } - } - - blockedPost(post: PostView) { - return { - $type: 'app.bsky.feed.defs#blockedPost', - uri: post.uri, - blocked: true as const, - author: { - did: post.author.did, - viewer: post.author.viewer - ? 
{ - blockedBy: post.author.viewer?.blockedBy, - blocking: post.author.viewer?.blocking, - } - : undefined, - }, - } - } - - notFoundPost(uri: string) { - return { - $type: 'app.bsky.feed.defs#notFoundPost', - uri: uri, - notFound: true as const, - } - } - - imagesEmbedView(did: string, embed: EmbedImages) { - const imgViews = embed.images.map((img) => ({ - thumb: this.imgUriBuilder.getPresetUri( - 'feed_thumbnail', - did, - img.image.ref, - ), - fullsize: this.imgUriBuilder.getPresetUri( - 'feed_fullsize', - did, - img.image.ref, - ), - alt: img.alt, - aspectRatio: img.aspectRatio, - })) - return { - $type: 'app.bsky.embed.images#view', - images: imgViews, - } - } - - externalEmbedView(did: string, embed: EmbedExternal) { - const { uri, title, description, thumb } = embed.external - return { - $type: 'app.bsky.embed.external#view', - external: { - uri, - title, - description, - thumb: thumb - ? this.imgUriBuilder.getPresetUri('feed_thumbnail', did, thumb.ref) - : undefined, - }, - } - } - - getRecordEmbedView( - uri: string, - post?: PostView, - omitEmbeds = false, - ): (ViewRecord | ViewNotFound | ViewBlocked) & { $type: string } { - if (!post) { - return { - $type: 'app.bsky.embed.record#viewNotFound', - uri, - notFound: true, - } - } - if (post.author.viewer?.blocking || post.author.viewer?.blockedBy) { - return { - $type: 'app.bsky.embed.record#viewBlocked', - uri, - blocked: true, - author: { - did: post.author.did, - viewer: post.author.viewer - ? { - blockedBy: post.author.viewer?.blockedBy, - blocking: post.author.viewer?.blocking, - } - : undefined, - }, - } - } - return { - $type: 'app.bsky.embed.record#viewRecord', - uri: post.uri, - cid: post.cid, - author: post.author, - value: post.record, - labels: post.labels, - indexedAt: post.indexedAt, - embeds: omitEmbeds ? undefined : post.embed ? 
[post.embed] : [], - } - } - - getRecordWithMediaEmbedView( - did: string, - embed: EmbedRecordWithMedia, - embedRecordView: RecordEmbedViewRecord, - ) { - let mediaEmbed: EmbedImagesView | EmbedExternalView - if (isEmbedImages(embed.media)) { - mediaEmbed = this.imagesEmbedView(did, embed.media) - } else if (isEmbedExternal(embed.media)) { - mediaEmbed = this.externalEmbedView(did, embed.media) - } else { - return - } - return { - $type: 'app.bsky.embed.recordWithMedia#view', - record: { - record: embedRecordView, - }, - media: mediaEmbed, - } - } - - formatThreadgate(gate: ThreadgateInfo, lists: ListInfoMap) { - return { - uri: gate.uri, - cid: gate.cid, - record: gate.record, - lists: mapDefined(gate.record.allow ?? [], (rule) => { - if (!isListRule(rule)) return - const list = lists[rule.list] - if (!list) return - return this.services.graph.formatListViewBasic(list) - }), - } - } -} diff --git a/packages/bsky/src/services/graph/index.ts b/packages/bsky/src/services/graph/index.ts deleted file mode 100644 index b154a8c47bb..00000000000 --- a/packages/bsky/src/services/graph/index.ts +++ /dev/null @@ -1,381 +0,0 @@ -import { sql } from 'kysely' -import { Database } from '../../db' -import { ImageUriBuilder } from '../../image/uri' -import { valuesList } from '../../db/util' -import { ListInfo } from './types' -import { ActorInfoMap } from '../actor' -import { - ListView, - ListViewBasic, -} from '../../lexicon/types/app/bsky/graph/defs' - -export * from './types' - -export class GraphService { - constructor(public db: Database, public imgUriBuilder: ImageUriBuilder) {} - - static creator(imgUriBuilder: ImageUriBuilder) { - return (db: Database) => new GraphService(db, imgUriBuilder) - } - - async muteActor(info: { - subjectDid: string - mutedByDid: string - createdAt?: Date - }) { - const { subjectDid, mutedByDid, createdAt = new Date() } = info - await this.db - .asPrimary() - .db.insertInto('mute') - .values({ - subjectDid, - mutedByDid, - createdAt: 
createdAt.toISOString(), - }) - .onConflict((oc) => oc.doNothing()) - .execute() - } - - async unmuteActor(info: { subjectDid: string; mutedByDid: string }) { - const { subjectDid, mutedByDid } = info - await this.db - .asPrimary() - .db.deleteFrom('mute') - .where('subjectDid', '=', subjectDid) - .where('mutedByDid', '=', mutedByDid) - .execute() - } - - async muteActorList(info: { - list: string - mutedByDid: string - createdAt?: Date - }) { - const { list, mutedByDid, createdAt = new Date() } = info - await this.db - .asPrimary() - .db.insertInto('list_mute') - .values({ - listUri: list, - mutedByDid, - createdAt: createdAt.toISOString(), - }) - .onConflict((oc) => oc.doNothing()) - .execute() - } - - async unmuteActorList(info: { list: string; mutedByDid: string }) { - const { list, mutedByDid } = info - await this.db - .asPrimary() - .db.deleteFrom('list_mute') - .where('listUri', '=', list) - .where('mutedByDid', '=', mutedByDid) - .execute() - } - - getListsQb(viewer: string | null) { - const { ref } = this.db.db.dynamic - return this.db.db - .selectFrom('list') - .innerJoin('actor', 'actor.did', 'list.creator') - .selectAll('list') - .selectAll('actor') - .select('list.sortAt as sortAt') - .select([ - this.db.db - .selectFrom('list_mute') - .where('list_mute.mutedByDid', '=', viewer ?? '') - .whereRef('list_mute.listUri', '=', ref('list.uri')) - .select('list_mute.listUri') - .as('viewerMuted'), - this.db.db - .selectFrom('list_block') - .where('list_block.creator', '=', viewer ?? '') - .whereRef('list_block.subjectUri', '=', ref('list.uri')) - .select('list_block.uri') - .as('viewerListBlockUri'), - this.db.db - .selectFrom('list_item') - .whereRef('list_item.listUri', '=', ref('list.uri')) - .where('list_item.subjectDid', '=', viewer ?? 
'') - .select('list_item.uri') - .as('viewerInList'), - ]) - } - - getListItemsQb() { - return this.db.db - .selectFrom('list_item') - .innerJoin('actor as subject', 'subject.did', 'list_item.subjectDid') - .selectAll('subject') - .select([ - 'list_item.uri as uri', - 'list_item.cid as cid', - 'list_item.sortAt as sortAt', - ]) - } - - async getBlockAndMuteState( - pairs: RelationshipPair[], - bam?: BlockAndMuteState, - ) { - pairs = bam ? pairs.filter((pair) => !bam.has(pair)) : pairs - const result = bam ?? new BlockAndMuteState() - if (!pairs.length) return result - const { ref } = this.db.db.dynamic - const sourceRef = ref('pair.source') - const targetRef = ref('pair.target') - const values = valuesList(pairs.map((p) => sql`${p[0]}, ${p[1]}`)) - const items = await this.db.db - .selectFrom(values.as(sql`pair (source, target)`)) - .select([ - sql`${sourceRef}`.as('source'), - sql`${targetRef}`.as('target'), - this.db.db - .selectFrom('actor_block') - .whereRef('creator', '=', sourceRef) - .whereRef('subjectDid', '=', targetRef) - .select('uri') - .as('blocking'), - this.db.db - .selectFrom('list_item') - .innerJoin('list_block', 'list_block.subjectUri', 'list_item.listUri') - .whereRef('list_block.creator', '=', sourceRef) - .whereRef('list_item.subjectDid', '=', targetRef) - .select('list_item.listUri') - .limit(1) - .as('blockingViaList'), - this.db.db - .selectFrom('actor_block') - .whereRef('creator', '=', targetRef) - .whereRef('subjectDid', '=', sourceRef) - .select('uri') - .as('blockedBy'), - this.db.db - .selectFrom('list_item') - .innerJoin('list_block', 'list_block.subjectUri', 'list_item.listUri') - .whereRef('list_block.creator', '=', targetRef) - .whereRef('list_item.subjectDid', '=', sourceRef) - .select('list_item.listUri') - .limit(1) - .as('blockedByViaList'), - this.db.db - .selectFrom('mute') - .whereRef('mutedByDid', '=', sourceRef) - .whereRef('subjectDid', '=', targetRef) - .select(sql`${true}`.as('val')) - .as('muting'), - this.db.db - 
.selectFrom('list_item') - .innerJoin('list_mute', 'list_mute.listUri', 'list_item.listUri') - .whereRef('list_mute.mutedByDid', '=', sourceRef) - .whereRef('list_item.subjectDid', '=', targetRef) - .select('list_item.listUri') - .limit(1) - .as('mutingViaList'), - ]) - .selectAll() - .execute() - items.forEach((item) => result.add(item)) - return result - } - - async getBlockState(pairs: RelationshipPair[], bam?: BlockAndMuteState) { - pairs = bam ? pairs.filter((pair) => !bam.has(pair)) : pairs - const result = bam ?? new BlockAndMuteState() - if (!pairs.length) return result - const { ref } = this.db.db.dynamic - const sourceRef = ref('pair.source') - const targetRef = ref('pair.target') - const values = valuesList(pairs.map((p) => sql`${p[0]}, ${p[1]}`)) - const items = await this.db.db - .selectFrom(values.as(sql`pair (source, target)`)) - .select([ - sql`${sourceRef}`.as('source'), - sql`${targetRef}`.as('target'), - this.db.db - .selectFrom('actor_block') - .whereRef('creator', '=', sourceRef) - .whereRef('subjectDid', '=', targetRef) - .select('uri') - .as('blocking'), - this.db.db - .selectFrom('list_item') - .innerJoin('list_block', 'list_block.subjectUri', 'list_item.listUri') - .whereRef('list_block.creator', '=', sourceRef) - .whereRef('list_item.subjectDid', '=', targetRef) - .select('list_item.listUri') - .limit(1) - .as('blockingViaList'), - this.db.db - .selectFrom('actor_block') - .whereRef('creator', '=', targetRef) - .whereRef('subjectDid', '=', sourceRef) - .select('uri') - .as('blockedBy'), - this.db.db - .selectFrom('list_item') - .innerJoin('list_block', 'list_block.subjectUri', 'list_item.listUri') - .whereRef('list_block.creator', '=', targetRef) - .whereRef('list_item.subjectDid', '=', sourceRef) - .select('list_item.listUri') - .limit(1) - .as('blockedByViaList'), - ]) - .selectAll() - .execute() - items.forEach((item) => result.add(item)) - return result - } - - async getListViews(listUris: string[], requester: string | null) { - if 
(listUris.length < 1) return {} - const lists = await this.getListsQb(requester) - .where('list.uri', 'in', listUris) - .execute() - return lists.reduce( - (acc, cur) => ({ - ...acc, - [cur.uri]: cur, - }), - {}, - ) - } - - formatListView(list: ListInfo, profiles: ActorInfoMap): ListView | undefined { - if (!profiles[list.creator]) { - return undefined - } - return { - ...this.formatListViewBasic(list), - creator: profiles[list.creator], - description: list.description ?? undefined, - descriptionFacets: list.descriptionFacets - ? JSON.parse(list.descriptionFacets) - : undefined, - indexedAt: list.sortAt, - } - } - - formatListViewBasic(list: ListInfo): ListViewBasic { - return { - uri: list.uri, - cid: list.cid, - name: list.name, - purpose: list.purpose, - avatar: list.avatarCid - ? this.imgUriBuilder.getPresetUri( - 'avatar', - list.creator, - list.avatarCid, - ) - : undefined, - indexedAt: list.sortAt, - viewer: { - muted: !!list.viewerMuted, - blocked: list.viewerListBlockUri ?? undefined, - }, - } - } -} - -export type RelationshipPair = [didA: string, didB: string] - -export class BlockAndMuteState { - hasIdx = new Map>() // did -> did - blockIdx = new Map>() // did -> did -> block uri - blockListIdx = new Map>() // did -> did -> list uri - muteIdx = new Map>() // did -> did - muteListIdx = new Map>() // did -> did -> list uri - constructor(items: BlockAndMuteInfo[] = []) { - items.forEach((item) => this.add(item)) - } - add(item: BlockAndMuteInfo) { - if (item.source === item.target) { - return // we do not respect self-blocks or self-mutes - } - if (item.blocking) { - const map = this.blockIdx.get(item.source) ?? new Map() - map.set(item.target, item.blocking) - if (!this.blockIdx.has(item.source)) { - this.blockIdx.set(item.source, map) - } - } - if (item.blockingViaList) { - const map = this.blockListIdx.get(item.source) ?? 
new Map() - map.set(item.target, item.blockingViaList) - if (!this.blockListIdx.has(item.source)) { - this.blockListIdx.set(item.source, map) - } - } - if (item.blockedBy) { - const map = this.blockIdx.get(item.target) ?? new Map() - map.set(item.source, item.blockedBy) - if (!this.blockIdx.has(item.target)) { - this.blockIdx.set(item.target, map) - } - } - if (item.blockedByViaList) { - const map = this.blockListIdx.get(item.target) ?? new Map() - map.set(item.source, item.blockedByViaList) - if (!this.blockListIdx.has(item.target)) { - this.blockListIdx.set(item.target, map) - } - } - if (item.muting) { - const set = this.muteIdx.get(item.source) ?? new Set() - set.add(item.target) - if (!this.muteIdx.has(item.source)) { - this.muteIdx.set(item.source, set) - } - } - if (item.mutingViaList) { - const map = this.muteListIdx.get(item.source) ?? new Map() - map.set(item.target, item.mutingViaList) - if (!this.muteListIdx.has(item.source)) { - this.muteListIdx.set(item.source, map) - } - } - const set = this.hasIdx.get(item.source) ?? new Set() - set.add(item.target) - if (!this.hasIdx.has(item.source)) { - this.hasIdx.set(item.source, set) - } - } - block(pair: RelationshipPair): boolean { - return !!this.blocking(pair) || !!this.blockedBy(pair) - } - // block or list uri - blocking(pair: RelationshipPair): string | null { - return this.blockIdx.get(pair[0])?.get(pair[1]) ?? this.blockList(pair) - } - // block or list uri - blockedBy(pair: RelationshipPair): string | null { - return this.blocking([pair[1], pair[0]]) - } - mute(pair: RelationshipPair): boolean { - return !!this.muteIdx.get(pair[0])?.has(pair[1]) || !!this.muteList(pair) - } - // list uri - blockList(pair: RelationshipPair): string | null { - return this.blockListIdx.get(pair[0])?.get(pair[1]) ?? null - } - muteList(pair: RelationshipPair): string | null { - return this.muteListIdx.get(pair[0])?.get(pair[1]) ?? 
null - } - has(pair: RelationshipPair) { - return !!this.hasIdx.get(pair[0])?.has(pair[1]) - } -} - -type BlockAndMuteInfo = { - source: string - target: string - blocking?: string | null - blockingViaList?: string | null - blockedBy?: string | null - blockedByViaList?: string | null - muting?: true | null - mutingViaList?: string | null -} diff --git a/packages/bsky/src/services/graph/types.ts b/packages/bsky/src/services/graph/types.ts deleted file mode 100644 index 5ff254dc383..00000000000 --- a/packages/bsky/src/services/graph/types.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Selectable } from 'kysely' -import { List } from '../../db/tables/list' - -export type ListInfo = Selectable & { - viewerMuted: string | null - viewerListBlockUri: string | null - viewerInList: string | null -} - -export type ListInfoMap = Record diff --git a/packages/bsky/src/services/index.ts b/packages/bsky/src/services/index.ts deleted file mode 100644 index 20bac6935dd..00000000000 --- a/packages/bsky/src/services/index.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Database } from '../db' -import { ImageUriBuilder } from '../image/uri' -import { ActorService } from './actor' -import { FeedService } from './feed' -import { GraphService } from './graph' -import { LabelService } from './label' -import { ImageInvalidator } from '../image/invalidator' -import { LabelCache } from '../label-cache' - -export function createServices(resources: { - imgUriBuilder: ImageUriBuilder - imgInvalidator: ImageInvalidator - labelCache: LabelCache -}): Services { - const { imgUriBuilder, labelCache } = resources - return { - actor: ActorService.creator(imgUriBuilder, labelCache), - feed: FeedService.creator(imgUriBuilder, labelCache), - graph: GraphService.creator(imgUriBuilder), - label: LabelService.creator(labelCache), - } -} - -export type Services = { - actor: FromDb - feed: FromDb - graph: FromDb - label: FromDb -} - -type FromDb = (db: Database) => T diff --git 
a/packages/bsky/src/services/label/index.ts b/packages/bsky/src/services/label/index.ts deleted file mode 100644 index f44b0439ddf..00000000000 --- a/packages/bsky/src/services/label/index.ts +++ /dev/null @@ -1,173 +0,0 @@ -import { sql } from 'kysely' -import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' -import { Database } from '../../db' -import { Label, isSelfLabels } from '../../lexicon/types/com/atproto/label/defs' -import { ids } from '../../lexicon/lexicons' -import { LabelCache } from '../../label-cache' - -export type Labels = Record - -export class LabelService { - constructor(public db: Database, public cache: LabelCache | null) {} - - static creator(cache: LabelCache | null) { - return (db: Database) => new LabelService(db, cache) - } - - async formatAndCreate( - src: string, - uri: string, - cid: string | null, - labels: { create?: string[]; negate?: string[] }, - ): Promise { - const { create = [], negate = [] } = labels - const toCreate = create.map((val) => ({ - src, - uri, - cid: cid ?? undefined, - val, - neg: false, - cts: new Date().toISOString(), - })) - const toNegate = negate.map((val) => ({ - src, - uri, - cid: cid ?? undefined, - val, - neg: true, - cts: new Date().toISOString(), - })) - const formatted = [...toCreate, ...toNegate] - await this.createLabels(formatted) - return formatted - } - - async createLabels(labels: Label[]) { - if (labels.length < 1) return - const dbVals = labels.map((l) => ({ - ...l, - cid: l.cid ?? 
'', - neg: !!l.neg, - })) - const { ref } = this.db.db.dynamic - const excluded = (col: string) => ref(`excluded.${col}`) - await this.db - .asPrimary() - .db.insertInto('label') - .values(dbVals) - .onConflict((oc) => - oc.columns(['src', 'uri', 'cid', 'val']).doUpdateSet({ - neg: sql`${excluded('neg')}`, - cts: sql`${excluded('cts')}`, - }), - ) - .execute() - } - - async getLabelsForUris( - subjects: string[], - opts?: { - includeNeg?: boolean - skipCache?: boolean - }, - ): Promise { - if (subjects.length < 1) return {} - const res = - this.cache === null || opts?.skipCache - ? await this.db.db - .selectFrom('label') - .where('label.uri', 'in', subjects) - .if(!opts?.includeNeg, (qb) => qb.where('neg', '=', false)) - .selectAll() - .execute() - : this.cache.forSubjects(subjects, opts?.includeNeg) - return res.reduce((acc, cur) => { - acc[cur.uri] ??= [] - acc[cur.uri].push({ - ...cur, - cid: cur.cid === '' ? undefined : cur.cid, - neg: cur.neg, - }) - return acc - }, {} as Labels) - } - - // gets labels for any record. when did is present, combine labels for both did & profile record. - async getLabelsForSubjects( - subjects: string[], - opts?: { - includeNeg?: boolean - skipCache?: boolean - }, - labels: Labels = {}, - ): Promise { - if (subjects.length < 1) return labels - const expandedSubjects = subjects.flatMap((subject) => { - if (labels[subject]) return [] // skip over labels we already have fetched - if (subject.startsWith('did:')) { - return [ - subject, - AtUri.make(subject, ids.AppBskyActorProfile, 'self').toString(), - ] - } - return subject - }) - const labelsByUri = await this.getLabelsForUris(expandedSubjects, opts) - return Object.keys(labelsByUri).reduce((acc, cur) => { - const uri = cur.startsWith('at://') ? 
new AtUri(cur) : null - if ( - uri && - uri.collection === ids.AppBskyActorProfile && - uri.rkey === 'self' - ) { - // combine labels for profile + did - const did = uri.hostname - acc[did] ??= [] - acc[did].push(...labelsByUri[cur]) - } - acc[cur] ??= [] - acc[cur].push(...labelsByUri[cur]) - return acc - }, labels) - } - - async getLabels( - subject: string, - opts?: { - includeNeg?: boolean - skipCache?: boolean - }, - ): Promise { - const labels = await this.getLabelsForUris([subject], opts) - return labels[subject] ?? [] - } - - async getLabelsForProfile( - did: string, - opts?: { - includeNeg?: boolean - skipCache?: boolean - }, - ): Promise { - const labels = await this.getLabelsForSubjects([did], opts) - return labels[did] ?? [] - } -} - -export function getSelfLabels(details: { - uri: string | null - cid: string | null - record: Record | null -}): Label[] { - const { uri, cid, record } = details - if (!uri || !cid || !record) return [] - if (!isSelfLabels(record.labels)) return [] - const src = new AtUri(uri).host // record creator - const cts = - typeof record.createdAt === 'string' - ? normalizeDatetimeAlways(record.createdAt) - : new Date(0).toISOString() - return record.labels.values.map(({ val }) => { - return { src, uri, cid, val, cts, neg: false } - }) -} diff --git a/packages/bsky/src/services/util/notification.ts b/packages/bsky/src/services/util/notification.ts deleted file mode 100644 index 811e6e41713..00000000000 --- a/packages/bsky/src/services/util/notification.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { sql } from 'kysely' -import { countAll } from '../../db/util' -import { PrimaryDatabase } from '../../db' - -// i.e. 30 days before the last time the user checked their notifs -export const BEFORE_LAST_SEEN_DAYS = 30 -// i.e. 
180 days before the latest unread notification -export const BEFORE_LATEST_UNREAD_DAYS = 180 -// don't consider culling unreads until they hit this threshold, and then enforce beforeLatestUnreadThresholdDays -export const UNREAD_KEPT_COUNT = 500 - -export const tidyNotifications = async (db: PrimaryDatabase, did: string) => { - const stats = await db.db - .selectFrom('notification') - .select([ - sql<0 | 1>`("sortAt" < "lastSeenNotifs")`.as('read'), - countAll.as('count'), - sql`min("sortAt")`.as('earliestAt'), - sql`max("sortAt")`.as('latestAt'), - sql`max("lastSeenNotifs")`.as('lastSeenAt'), - ]) - .leftJoin('actor_state', 'actor_state.did', 'notification.did') - .where('notification.did', '=', did) - .groupBy(sql`1`) // group by read (i.e. 1st column) - .execute() - const readStats = stats.find((stat) => stat.read) - const unreadStats = stats.find((stat) => !stat.read) - let readCutoffAt: Date | undefined - let unreadCutoffAt: Date | undefined - if (readStats) { - readCutoffAt = addDays( - new Date(readStats.lastSeenAt), - -BEFORE_LAST_SEEN_DAYS, - ) - } - if (unreadStats && unreadStats.count > UNREAD_KEPT_COUNT) { - unreadCutoffAt = addDays( - new Date(unreadStats.latestAt), - -BEFORE_LATEST_UNREAD_DAYS, - ) - } - // take most recent of read/unread cutoffs - const cutoffAt = greatest(readCutoffAt, unreadCutoffAt) - if (cutoffAt) { - // skip delete if it wont catch any notifications - const earliestAt = least(readStats?.earliestAt, unreadStats?.earliestAt) - if (earliestAt && earliestAt < cutoffAt.toISOString()) { - await db.db - .deleteFrom('notification') - .where('did', '=', did) - .where('sortAt', '<', cutoffAt.toISOString()) - .execute() - } - } -} - -const addDays = (date: Date, days: number) => { - date.setDate(date.getDate() + days) - return date -} - -const least = (a: T | undefined, b: T | undefined) => { - return a !== undefined && (b === undefined || a < b) ? 
a : b -} - -const greatest = (a: T | undefined, b: T | undefined) => { - return a !== undefined && (b === undefined || a > b) ? a : b -} - -type Ordered = string | number | Date diff --git a/packages/bsky/src/services/util/post.ts b/packages/bsky/src/services/util/post.ts deleted file mode 100644 index 19e7fa3ee2c..00000000000 --- a/packages/bsky/src/services/util/post.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { sql } from 'kysely' -import DatabaseSchema from '../../db/database-schema' - -export const getDescendentsQb = ( - db: DatabaseSchema, - opts: { - uri: string - depth: number // required, protects against cycles - }, -) => { - const { uri, depth } = opts - const query = db.withRecursive('descendent(uri, depth)', (cte) => { - return cte - .selectFrom('post') - .select(['post.uri as uri', sql`1`.as('depth')]) - .where(sql`1`, '<=', depth) - .where('replyParent', '=', uri) - .unionAll( - cte - .selectFrom('post') - .innerJoin('descendent', 'descendent.uri', 'post.replyParent') - .where('descendent.depth', '<', depth) - .select([ - 'post.uri as uri', - sql`descendent.depth + 1`.as('depth'), - ]), - ) - }) - return query -} - -export const getAncestorsAndSelfQb = ( - db: DatabaseSchema, - opts: { - uri: string - parentHeight: number // required, protects against cycles - }, -) => { - const { uri, parentHeight } = opts - const query = db.withRecursive( - 'ancestor(uri, ancestorUri, height)', - (cte) => { - return cte - .selectFrom('post') - .select([ - 'post.uri as uri', - 'post.replyParent as ancestorUri', - sql`0`.as('height'), - ]) - .where('uri', '=', uri) - .unionAll( - cte - .selectFrom('post') - .innerJoin('ancestor', 'ancestor.ancestorUri', 'post.uri') - .where('ancestor.height', '<', parentHeight) - .select([ - 'post.uri as uri', - 'post.replyParent as ancestorUri', - sql`ancestor.height + 1`.as('height'), - ]), - ) - }, - ) - return query -} diff --git a/packages/bsky/src/services/util/search.ts b/packages/bsky/src/services/util/search.ts deleted file 
mode 100644 index 994d2f43879..00000000000 --- a/packages/bsky/src/services/util/search.ts +++ /dev/null @@ -1,172 +0,0 @@ -import { sql } from 'kysely' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { Database } from '../../db' -import { notSoftDeletedClause, DbRef, AnyQb } from '../../db/util' -import { GenericKeyset, paginate } from '../../db/pagination' - -export const getUserSearchQuery = ( - db: Database, - opts: { - query: string - limit: number - cursor?: string - includeSoftDeleted?: boolean - }, -) => { - const { ref } = db.db.dynamic - const { query, limit, cursor, includeSoftDeleted } = opts - // Matching user accounts based on handle - const distanceAccount = distance(query, ref('handle')) - let accountsQb = getMatchingAccountsQb(db, { query, includeSoftDeleted }) - accountsQb = paginate(accountsQb, { - limit, - cursor, - direction: 'asc', - keyset: new SearchKeyset(distanceAccount, ref('actor.did')), - }) - // Matching profiles based on display name - const distanceProfile = distance(query, ref('displayName')) - let profilesQb = getMatchingProfilesQb(db, { query, includeSoftDeleted }) - profilesQb = paginate(profilesQb, { - limit, - cursor, - direction: 'asc', - keyset: new SearchKeyset(distanceProfile, ref('actor.did')), - }) - // Combine and paginate result set - return paginate(combineAccountsAndProfilesQb(db, accountsQb, profilesQb), { - limit, - cursor, - direction: 'asc', - keyset: new SearchKeyset(ref('distance'), ref('actor.did')), - }) -} - -// Takes maximal advantage of trigram index at the expense of ability to paginate. 
-export const getUserSearchQuerySimple = ( - db: Database, - opts: { - query: string - limit: number - }, -) => { - const { ref } = db.db.dynamic - const { query, limit } = opts - // Matching user accounts based on handle - const accountsQb = getMatchingAccountsQb(db, { query }) - .orderBy('distance', 'asc') - .limit(limit) - // Matching profiles based on display name - const profilesQb = getMatchingProfilesQb(db, { query }) - .orderBy('distance', 'asc') - .limit(limit) - // Combine and paginate result set - return paginate(combineAccountsAndProfilesQb(db, accountsQb, profilesQb), { - limit, - direction: 'asc', - keyset: new SearchKeyset(ref('distance'), ref('actor.did')), - }) -} - -// Matching user accounts based on handle -const getMatchingAccountsQb = ( - db: Database, - opts: { query: string; includeSoftDeleted?: boolean }, -) => { - const { ref } = db.db.dynamic - const { query, includeSoftDeleted } = opts - const distanceAccount = distance(query, ref('handle')) - return db.db - .selectFrom('actor') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('actor'))), - ) - .where('actor.handle', 'is not', null) - .where(similar(query, ref('handle'))) // Coarse filter engaging trigram index - .select(['actor.did as did', distanceAccount.as('distance')]) -} - -// Matching profiles based on display name -const getMatchingProfilesQb = ( - db: Database, - opts: { query: string; includeSoftDeleted?: boolean }, -) => { - const { ref } = db.db.dynamic - const { query, includeSoftDeleted } = opts - const distanceProfile = distance(query, ref('displayName')) - return db.db - .selectFrom('profile') - .innerJoin('actor', 'actor.did', 'profile.creator') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('actor'))), - ) - .where('actor.handle', 'is not', null) - .where(similar(query, ref('displayName'))) // Coarse filter engaging trigram index - .select(['profile.creator as did', distanceProfile.as('distance')]) -} - -// Combine profile 
and account result sets -const combineAccountsAndProfilesQb = ( - db: Database, - accountsQb: AnyQb, - profilesQb: AnyQb, -) => { - // Combine user account and profile results, taking best matches from each - const emptyQb = db.db - .selectFrom('actor') - .where(sql`1 = 0`) - .select([sql.literal('').as('did'), sql`0`.as('distance')]) - const resultsQb = db.db - .selectFrom( - emptyQb - .unionAll(sql`${accountsQb}`) // The sql`` is adding parens - .unionAll(sql`${profilesQb}`) - .as('accounts_and_profiles'), - ) - .selectAll() - .distinctOn('did') // Per did, take whichever of account and profile distance is best - .orderBy('did') - .orderBy('distance') - return db.db - .selectFrom(resultsQb.as('results')) - .innerJoin('actor', 'actor.did', 'results.did') -} - -// Remove leading @ in case a handle is input that way -export const cleanQuery = (query: string) => query.trim().replace(/^@/g, '') - -// Uses pg_trgm strict word similarity to check similarity between a search query and a stored value -const distance = (query: string, ref: DbRef) => - sql`(${query} <<-> ${ref})` - -// Can utilize trigram index to match on strict word similarity. -// The word_similarity_threshold is set to .4 (i.e. distance < .6) in db/index.ts. 
-const similar = (query: string, ref: DbRef) => - sql`(${query} <% ${ref})` - -type Result = { distance: number; did: string } -type LabeledResult = { primary: number; secondary: string } -export class SearchKeyset extends GenericKeyset { - labelResult(result: Result) { - return { - primary: result.distance, - secondary: result.did, - } - } - labeledResultToCursor(labeled: LabeledResult) { - return { - primary: labeled.primary.toString().replace('0.', '.'), - secondary: labeled.secondary, - } - } - cursorToLabeledResult(cursor: { primary: string; secondary: string }) { - const distance = parseFloat(cursor.primary) - if (isNaN(distance)) { - throw new InvalidRequestError('Malformed cursor') - } - return { - primary: distance, - secondary: cursor.secondary, - } - } -} diff --git a/packages/bsky/src/views/index.ts b/packages/bsky/src/views/index.ts index 67b93c859ee..e8d50320e9f 100644 --- a/packages/bsky/src/views/index.ts +++ b/packages/bsky/src/views/index.ts @@ -19,7 +19,7 @@ import { ThreadgateView, } from '../lexicon/types/app/bsky/feed/defs' import { ListView, ListViewBasic } from '../lexicon/types/app/bsky/graph/defs' -import { compositeTime, creatorFromUri } from './util' +import { compositeTime, creatorFromUri, parseThreadGate } from './util' import { mapDefined } from '@atproto/common' import { isListRule } from '../lexicon/types/app/bsky/feed/threadgate' import { isSelfLabels } from '../lexicon/types/com/atproto/label/defs' @@ -49,7 +49,6 @@ import { Label } from '../hydration/label' import { Repost } from '../hydration/feed' import { RecordInfo } from '../hydration/util' import { Notification } from '../data-plane/gen/bsky_pb' -import { parseThreadGate } from '../services/feed/util' export class Views { constructor(public imgUriBuilder: ImageUriBuilder) {} diff --git a/packages/bsky/src/views/util.ts b/packages/bsky/src/views/util.ts index 3b01d5b64ce..b63e3e9c3cd 100644 --- a/packages/bsky/src/views/util.ts +++ b/packages/bsky/src/views/util.ts @@ -1,4 
+1,12 @@ import { AtUri } from '@atproto/syntax' +import { Record as PostRecord } from '../lexicon/types/app/bsky/feed/post' +import { + Record as GateRecord, + isFollowingRule, + isListRule, + isMentionRule, +} from '../lexicon/types/app/bsky/feed/threadgate' +import { isMention } from '../lexicon/types/app/bsky/richtext/facet' const now = () => { return new Date().toISOString() @@ -11,3 +19,42 @@ export const compositeTime = (createdAt = now(), indexedAt = now()): string => { export const creatorFromUri = (uri: string): string => { return new AtUri(uri).hostname } + +export const parseThreadGate = ( + replierDid: string, + ownerDid: string, + rootPost: PostRecord | null, + gate: GateRecord | null, +): ParsedThreadGate => { + if (replierDid === ownerDid) { + return { canReply: true } + } + // if gate.allow is unset then *any* reply is allowed, if it is an empty array then *no* reply is allowed + if (!gate || !gate.allow) { + return { canReply: true } + } + + const allowMentions = !!gate.allow.find(isMentionRule) + const allowFollowing = !!gate.allow.find(isFollowingRule) + const allowListUris = gate.allow?.filter(isListRule).map((item) => item.list) + + // check mentions first since it's quick and synchronous + if (allowMentions) { + const isMentioned = rootPost?.facets?.some((facet) => { + return facet.features.some( + (item) => isMention(item) && item.did === replierDid, + ) + }) + if (isMentioned) { + return { canReply: true, allowMentions, allowFollowing, allowListUris } + } + } + return { allowMentions, allowFollowing, allowListUris } +} + +type ParsedThreadGate = { + canReply?: boolean + allowMentions?: boolean + allowFollowing?: boolean + allowListUris?: string[] +} From c3fb682014402e390f08765ddc2d6c9232d1fc55 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Wed, 20 Dec 2023 17:55:14 -0500 Subject: [PATCH 04/17] remove tests not needed by appview v2, misc reorg --- .../src/api/app/bsky/feed/getFeedSkeleton.ts | 2 +- .../unspecced/getPopularFeedGenerators.ts 
| 89 +- .../app/bsky/unspecced/getTimelineSkeleton.ts | 2 +- packages/bsky/src/{ => api}/feed-gen/index.ts | 0 packages/bsky/src/{ => api}/feed-gen/types.ts | 6 +- packages/bsky/src/config.ts | 124 +-- packages/bsky/src/context.ts | 2 +- packages/bsky/src/data-plane/server/index.ts | 2 +- .../server}/subscription/util.ts | 6 +- packages/bsky/src/index.ts | 6 +- packages/bsky/src/util/date.ts | 14 - .../__snapshots__/get-record.test.ts.snap | 173 ---- .../admin/__snapshots__/get-repo.test.ts.snap | 57 -- .../moderation-events.test.ts.snap | 146 --- .../moderation-statuses.test.ts.snap | 64 -- .../__snapshots__/moderation.test.ts.snap | 55 - packages/bsky/tests/admin/get-record.test.ts | 111 --- packages/bsky/tests/admin/get-repo.test.ts | 132 --- .../tests/admin/moderation-events.test.ts | 221 ----- .../tests/admin/moderation-statuses.test.ts | 145 --- packages/bsky/tests/admin/moderation.test.ts | 939 ------------------ packages/bsky/tests/admin/repo-search.test.ts | 124 --- packages/bsky/tests/algos/hot-classic.test.ts | 88 -- packages/bsky/tests/algos/whats-hot.test.ts | 118 --- .../bsky/tests/algos/with-friends.test.ts | 145 --- .../fixtures/hiveai_resp_example.json | 401 -------- .../auto-moderator/fuzzy-matcher.test.ts | 165 --- .../bsky/tests/auto-moderator/hive.test.ts | 16 - .../bsky/tests/auto-moderator/labeler.test.ts | 167 ---- .../tests/auto-moderator/takedowns.test.ts | 201 ---- packages/bsky/tests/daemon.test.ts | 191 ---- .../bsky/tests/notification-server.test.ts | 231 ----- .../bsky/tests/pipeline/backpressure.test.ts | 68 -- packages/bsky/tests/pipeline/reingest.test.ts | 55 - .../bsky/tests/pipeline/repartition.test.ts | 86 -- packages/bsky/tests/reprocessing.test.ts | 71 -- packages/bsky/tests/subscription/repo.test.ts | 2 +- packages/bsky/tests/subscription/util.test.ts | 2 +- 38 files changed, 33 insertions(+), 4394 deletions(-) rename packages/bsky/src/{ => api}/feed-gen/index.ts (100%) rename packages/bsky/src/{ => api}/feed-gen/types.ts 
(75%) rename packages/bsky/src/{ => data-plane/server}/subscription/util.ts (95%) delete mode 100644 packages/bsky/src/util/date.ts delete mode 100644 packages/bsky/tests/admin/__snapshots__/get-record.test.ts.snap delete mode 100644 packages/bsky/tests/admin/__snapshots__/get-repo.test.ts.snap delete mode 100644 packages/bsky/tests/admin/__snapshots__/moderation-events.test.ts.snap delete mode 100644 packages/bsky/tests/admin/__snapshots__/moderation-statuses.test.ts.snap delete mode 100644 packages/bsky/tests/admin/__snapshots__/moderation.test.ts.snap delete mode 100644 packages/bsky/tests/admin/get-record.test.ts delete mode 100644 packages/bsky/tests/admin/get-repo.test.ts delete mode 100644 packages/bsky/tests/admin/moderation-events.test.ts delete mode 100644 packages/bsky/tests/admin/moderation-statuses.test.ts delete mode 100644 packages/bsky/tests/admin/moderation.test.ts delete mode 100644 packages/bsky/tests/admin/repo-search.test.ts delete mode 100644 packages/bsky/tests/algos/hot-classic.test.ts delete mode 100644 packages/bsky/tests/algos/whats-hot.test.ts delete mode 100644 packages/bsky/tests/algos/with-friends.test.ts delete mode 100644 packages/bsky/tests/auto-moderator/fixtures/hiveai_resp_example.json delete mode 100644 packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts delete mode 100644 packages/bsky/tests/auto-moderator/hive.test.ts delete mode 100644 packages/bsky/tests/auto-moderator/labeler.test.ts delete mode 100644 packages/bsky/tests/auto-moderator/takedowns.test.ts delete mode 100644 packages/bsky/tests/daemon.test.ts delete mode 100644 packages/bsky/tests/notification-server.test.ts delete mode 100644 packages/bsky/tests/pipeline/backpressure.test.ts delete mode 100644 packages/bsky/tests/pipeline/reingest.test.ts delete mode 100644 packages/bsky/tests/pipeline/repartition.test.ts delete mode 100644 packages/bsky/tests/reprocessing.test.ts diff --git a/packages/bsky/src/api/app/bsky/feed/getFeedSkeleton.ts 
b/packages/bsky/src/api/app/bsky/feed/getFeedSkeleton.ts index 5d65044f86f..ae7bb5f2330 100644 --- a/packages/bsky/src/api/app/bsky/feed/getFeedSkeleton.ts +++ b/packages/bsky/src/api/app/bsky/feed/getFeedSkeleton.ts @@ -1,7 +1,7 @@ import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { toSkeletonItem } from '../../../../feed-gen/types' +import { toSkeletonItem } from '../../../feed-gen/types' export default function (server: Server, ctx: AppContext) { server.app.bsky.feed.getFeedSkeleton({ diff --git a/packages/bsky/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts b/packages/bsky/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts index e135d2cb7c1..61836284eef 100644 --- a/packages/bsky/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts +++ b/packages/bsky/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts @@ -1,101 +1,18 @@ import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { countAll } from '../../../../db/util' -import { GenericKeyset, paginate } from '../../../../db/pagination' -import { InvalidRequestError } from '@atproto/xrpc-server' -import { GeneratorView } from '../../../../lexicon/types/app/bsky/feed/defs' // THIS IS A TEMPORARY UNSPECCED ROUTE export default function (server: Server, ctx: AppContext) { server.app.bsky.unspecced.getPopularFeedGenerators({ auth: ctx.authOptionalVerifier, - handler: async ({ auth, params }) => { - const { limit, cursor, query } = params - const requester = auth.credentials.did - const db = ctx.db.getReplica() - const { ref } = db.db.dynamic - const feedService = ctx.services.feed(db) - const actorService = ctx.services.actor(db) - - let inner = db.db - .selectFrom('feed_generator') - .select([ - 'uri', - 'cid', - db.db - .selectFrom('like') - .whereRef('like.subject', '=', ref('feed_generator.uri')) - .select(countAll.as('count')) - .as('likeCount'), - ]) 
- - if (query) { - inner = inner.where((qb) => - qb - .where('feed_generator.displayName', 'ilike', `%${query}%`) - .orWhere('feed_generator.description', 'ilike', `%${query}%`), - ) - } - - let builder = db.db.selectFrom(inner.as('feed_gens')).selectAll() - - const keyset = new LikeCountKeyset(ref('likeCount'), ref('cid')) - builder = paginate(builder, { limit, cursor, keyset, direction: 'desc' }) - - const res = await builder.execute() - - const genInfos = await feedService.getFeedGeneratorInfos( - res.map((feed) => feed.uri), - requester, - ) - - const creators = Object.values(genInfos).map((gen) => gen.creator) - const profiles = await actorService.views.profiles(creators, requester) - - const genViews: GeneratorView[] = [] - for (const row of res) { - const gen = genInfos[row.uri] - if (!gen) continue - const view = feedService.views.formatFeedGeneratorView(gen, profiles) - if (view) { - genViews.push(view) - } - } - + handler: async (_reqCtx) => { + // @TODO not currently supported during appview v2 buildout return { encoding: 'application/json', body: { - cursor: keyset.packFromResult(res), - feeds: genViews, + feeds: [], }, } }, }) } - -type Result = { likeCount: number; cid: string } -type LabeledResult = { primary: number; secondary: string } -export class LikeCountKeyset extends GenericKeyset { - labelResult(result: Result) { - return { - primary: result.likeCount, - secondary: result.cid, - } - } - labeledResultToCursor(labeled: LabeledResult) { - return { - primary: labeled.primary.toString(), - secondary: labeled.secondary, - } - } - cursorToLabeledResult(cursor: { primary: string; secondary: string }) { - const likes = parseInt(cursor.primary, 10) - if (isNaN(likes)) { - throw new InvalidRequestError('Malformed cursor') - } - return { - primary: likes, - secondary: cursor.secondary, - } - } -} diff --git a/packages/bsky/src/api/app/bsky/unspecced/getTimelineSkeleton.ts b/packages/bsky/src/api/app/bsky/unspecced/getTimelineSkeleton.ts index 
c382fd4cb4f..d91fc9a206b 100644 --- a/packages/bsky/src/api/app/bsky/unspecced/getTimelineSkeleton.ts +++ b/packages/bsky/src/api/app/bsky/unspecced/getTimelineSkeleton.ts @@ -2,7 +2,7 @@ import { Server } from '../../../../lexicon' import { ids } from '../../../../lexicon/lexicons' import AppContext from '../../../../context' import { skeleton } from '../feed/getTimeline' -import { toSkeletonItem } from '../../../../feed-gen/types' +import { toSkeletonItem } from '../../../feed-gen/types' import { urisByCollection } from '../../../../hydration/util' // THIS IS A TEMPORARY UNSPECCED ROUTE diff --git a/packages/bsky/src/feed-gen/index.ts b/packages/bsky/src/api/feed-gen/index.ts similarity index 100% rename from packages/bsky/src/feed-gen/index.ts rename to packages/bsky/src/api/feed-gen/index.ts diff --git a/packages/bsky/src/feed-gen/types.ts b/packages/bsky/src/api/feed-gen/types.ts similarity index 75% rename from packages/bsky/src/feed-gen/types.ts rename to packages/bsky/src/api/feed-gen/types.ts index bf3eee202d0..14b12032074 100644 --- a/packages/bsky/src/feed-gen/types.ts +++ b/packages/bsky/src/api/feed-gen/types.ts @@ -1,6 +1,6 @@ -import AppContext from '../context' -import { SkeletonFeedPost } from '../lexicon/types/app/bsky/feed/defs' -import { QueryParams as SkeletonParams } from '../lexicon/types/app/bsky/feed/getFeedSkeleton' +import AppContext from '../../context' +import { SkeletonFeedPost } from '../../lexicon/types/app/bsky/feed/defs' +import { QueryParams as SkeletonParams } from '../../lexicon/types/app/bsky/feed/getFeedSkeleton' export type AlgoResponseItem = { itemUri: string diff --git a/packages/bsky/src/config.ts b/packages/bsky/src/config.ts index b2808326fa6..5611adb1e78 100644 --- a/packages/bsky/src/config.ts +++ b/packages/bsky/src/config.ts @@ -1,30 +1,20 @@ import assert from 'assert' -import { DAY, HOUR, parseIntWithFallback } from '@atproto/common' export interface ServerConfigValues { - version: string + version?: string 
debugMode?: boolean port?: number publicUrl?: string serverDid: string feedGenDid?: string - dbPrimaryPostgresUrl: string - dbReplicaPostgresUrls?: string[] - dbReplicaTags?: Record // E.g. { timeline: [0], thread: [1] } - dbPostgresSchema?: string dataplaneUrl: string didPlcUrl: string - didCacheStaleTTL: number - didCacheMaxTTL: number handleResolveNameservers?: string[] imgUriEndpoint?: string blobCacheLocation?: string - searchEndpoint?: string - labelerDid: string adminPassword: string moderatorPassword?: string triagePassword?: string - moderationPushUrl?: string } export class ServerConfig { @@ -32,59 +22,24 @@ export class ServerConfig { constructor(private cfg: ServerConfigValues) {} static readEnv(overrides?: Partial) { - const version = process.env.BSKY_VERSION || '0.0.0' + const version = process.env.BSKY_VERSION || undefined const debugMode = process.env.NODE_ENV !== 'production' - const publicUrl = process.env.PUBLIC_URL || undefined - const serverDid = process.env.SERVER_DID || 'did:example:test' - const feedGenDid = process.env.FEED_GEN_DID - const envPort = parseInt(process.env.PORT || '', 10) + const publicUrl = process.env.BSKY_PUBLIC_URL || undefined + const serverDid = process.env.BSKY_SERVER_DID || 'did:example:test' + const feedGenDid = process.env.BSKY_FEED_GEN_DID + const envPort = parseInt(process.env.BSKY_PORT || '', 10) const port = isNaN(envPort) ? 2584 : envPort - const didPlcUrl = process.env.DID_PLC_URL || 'http://localhost:2582' - const didCacheStaleTTL = parseIntWithFallback( - process.env.DID_CACHE_STALE_TTL, - HOUR, - ) - const didCacheMaxTTL = parseIntWithFallback( - process.env.DID_CACHE_MAX_TTL, - DAY, - ) - const handleResolveNameservers = process.env.HANDLE_RESOLVE_NAMESERVERS - ? process.env.HANDLE_RESOLVE_NAMESERVERS.split(',') + const didPlcUrl = process.env.BSKY_DID_PLC_URL || 'http://localhost:2582' + const handleResolveNameservers = process.env.BSKY_HANDLE_RESOLVE_NAMESERVERS + ? 
process.env.BSKY_HANDLE_RESOLVE_NAMESERVERS.split(',') : [] - const imgUriEndpoint = process.env.IMG_URI_ENDPOINT - const blobCacheLocation = process.env.BLOB_CACHE_LOC - const searchEndpoint = process.env.SEARCH_ENDPOINT - const dbPrimaryPostgresUrl = - overrides?.dbPrimaryPostgresUrl || process.env.DB_PRIMARY_POSTGRES_URL - let dbReplicaPostgresUrls = overrides?.dbReplicaPostgresUrls - if (!dbReplicaPostgresUrls && process.env.DB_REPLICA_POSTGRES_URLS) { - dbReplicaPostgresUrls = process.env.DB_REPLICA_POSTGRES_URLS.split(',') - } - const dbReplicaTags = overrides?.dbReplicaTags ?? { - '*': getTagIdxs(process.env.DB_REPLICA_TAGS_ANY), // e.g. DB_REPLICA_TAGS_ANY=0,1 - timeline: getTagIdxs(process.env.DB_REPLICA_TAGS_TIMELINE), - feed: getTagIdxs(process.env.DB_REPLICA_TAGS_FEED), - search: getTagIdxs(process.env.DB_REPLICA_TAGS_SEARCH), - thread: getTagIdxs(process.env.DB_REPLICA_TAGS_THREAD), - } - assert( - Object.values(dbReplicaTags) - .flat() - .every((idx) => idx < (dbReplicaPostgresUrls?.length ?? 
0)), - 'out of range index in replica tags', - ) - const dbPostgresSchema = process.env.DB_POSTGRES_SCHEMA - assert(dbPrimaryPostgresUrl) - const dataplaneUrl = process.env.DATAPLANE_URL + const imgUriEndpoint = process.env.BSKY_IMG_URI_ENDPOINT + const blobCacheLocation = process.env.BSKY_BLOB_CACHE_LOC + const dataplaneUrl = process.env.BSKY_DATAPLANE_URL + const adminPassword = process.env.BSKY_ADMIN_PASSWORD || 'admin' + const moderatorPassword = process.env.BSKY_MODERATOR_PASSWORD || undefined + const triagePassword = process.env.BSKY_TRIAGE_PASSWORD || undefined assert(dataplaneUrl) - const adminPassword = process.env.ADMIN_PASSWORD || 'admin' - const moderatorPassword = process.env.MODERATOR_PASSWORD || undefined - const triagePassword = process.env.TRIAGE_PASSWORD || undefined - const labelerDid = process.env.LABELER_DID || 'did:example:labeler' - const moderationPushUrl = - overrides?.moderationPushUrl || - process.env.MODERATION_PUSH_URL || - undefined return new ServerConfig({ version, debugMode, @@ -92,23 +47,14 @@ export class ServerConfig { publicUrl, serverDid, feedGenDid, - dbPrimaryPostgresUrl, - dbReplicaPostgresUrls, - dbReplicaTags, - dbPostgresSchema, dataplaneUrl, didPlcUrl, - didCacheStaleTTL, - didCacheMaxTTL, handleResolveNameservers, imgUriEndpoint, blobCacheLocation, - searchEndpoint, - labelerDid, adminPassword, moderatorPassword, triagePassword, - moderationPushUrl, ...stripUndefineds(overrides ?? 
{}), }) } @@ -150,34 +96,10 @@ export class ServerConfig { return this.cfg.feedGenDid } - get dbPrimaryPostgresUrl() { - return this.cfg.dbPrimaryPostgresUrl - } - - get dbReplicaPostgresUrl() { - return this.cfg.dbReplicaPostgresUrls - } - - get dbReplicaTags() { - return this.cfg.dbReplicaTags - } - - get dbPostgresSchema() { - return this.cfg.dbPostgresSchema - } - get dataplaneUrl() { return this.cfg.dataplaneUrl } - get didCacheStaleTTL() { - return this.cfg.didCacheStaleTTL - } - - get didCacheMaxTTL() { - return this.cfg.didCacheMaxTTL - } - get handleResolveNameservers() { return this.cfg.handleResolveNameservers } @@ -194,14 +116,6 @@ export class ServerConfig { return this.cfg.blobCacheLocation } - get searchEndpoint() { - return this.cfg.searchEndpoint - } - - get labelerDid() { - return this.cfg.labelerDid - } - get adminPassword() { return this.cfg.adminPassword } @@ -213,14 +127,6 @@ export class ServerConfig { get triagePassword() { return this.cfg.triagePassword } - - get moderationPushUrl() { - return this.cfg.moderationPushUrl - } -} - -function getTagIdxs(str?: string): number[] { - return str ? 
str.split(',').map((item) => parseInt(item, 10)) : [] } function stripUndefineds( diff --git a/packages/bsky/src/context.ts b/packages/bsky/src/context.ts index ff154999cd4..bcad63760c1 100644 --- a/packages/bsky/src/context.ts +++ b/packages/bsky/src/context.ts @@ -5,7 +5,7 @@ import { Keypair } from '@atproto/crypto' import { createServiceJwt } from '@atproto/xrpc-server' import { ServerConfig } from './config' import * as auth from './auth' -import { MountedAlgos } from './feed-gen/types' +import { MountedAlgos } from './api/feed-gen/types' import { DataPlaneClient } from './data-plane/client' import { Hydrator } from './hydration/hydrator' import { Views } from './views' diff --git a/packages/bsky/src/data-plane/server/index.ts b/packages/bsky/src/data-plane/server/index.ts index 554d5bd2f7d..9e37667cca8 100644 --- a/packages/bsky/src/data-plane/server/index.ts +++ b/packages/bsky/src/data-plane/server/index.ts @@ -3,7 +3,7 @@ import events from 'events' import express from 'express' import { expressConnectMiddleware } from '@connectrpc/connect-express' import createRoutes from './routes' -import { Database } from '../../db' +import { Database } from './db' export class DataPlaneServer { constructor(public server: http.Server) {} diff --git a/packages/bsky/src/subscription/util.ts b/packages/bsky/src/data-plane/server/subscription/util.ts similarity index 95% rename from packages/bsky/src/subscription/util.ts rename to packages/bsky/src/data-plane/server/subscription/util.ts index fe367bcc24c..ff04eae4f50 100644 --- a/packages/bsky/src/subscription/util.ts +++ b/packages/bsky/src/data-plane/server/subscription/util.ts @@ -1,7 +1,7 @@ -import PQueue from 'p-queue' -import { OutputSchema as RepoMessage } from '../lexicon/types/com/atproto/sync/subscribeRepos' -import * as message from '../lexicon/types/com/atproto/sync/subscribeRepos' import assert from 'node:assert' +import PQueue from 'p-queue' +import { OutputSchema as RepoMessage } from 
'../../../lexicon/types/com/atproto/sync/subscribeRepos' +import * as message from '../../../lexicon/types/com/atproto/sync/subscribeRepos' // A queue with arbitrarily many partitions, each processing work sequentially. // Partitions are created lazily and taken out of memory when they go idle. diff --git a/packages/bsky/src/index.ts b/packages/bsky/src/index.ts index 0b3a3853ad9..2ed2c0763b0 100644 --- a/packages/bsky/src/index.ts +++ b/packages/bsky/src/index.ts @@ -14,7 +14,7 @@ import { createServer } from './lexicon' import { ImageUriBuilder } from './image/uri' import { BlobDiskCache, ImageProcessingServer } from './image/server' import AppContext from './context' -import { MountedAlgos } from './feed-gen/types' +import { MountedAlgos } from './api/feed-gen/types' import { Keypair } from '@atproto/crypto' import { createDataPlaneClient } from './data-plane/client' import { Hydrator } from './hydration/hydrator' @@ -22,7 +22,7 @@ import { Views } from './views' export * from './data-plane' export type { ServerConfigValues } from './config' -export type { MountedAlgos } from './feed-gen/types' +export type { MountedAlgos } from './api/feed-gen/types' export { ServerConfig } from './config' export { Database, @@ -31,7 +31,7 @@ export { } from './data-plane/server/db' export { Redis } from './redis' export { AppContext } from './context' -export { makeAlgos } from './feed-gen' +export { makeAlgos } from './api/feed-gen' export class BskyAppView { public ctx: AppContext diff --git a/packages/bsky/src/util/date.ts b/packages/bsky/src/util/date.ts deleted file mode 100644 index af9767a0f7f..00000000000 --- a/packages/bsky/src/util/date.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** - * This function takes a number as input and returns a Date object, - * which is the current date and time plus the input number of hours. - * - * @param {number} hours - The number of hours to add to the current date and time. 
- * @param {Date} startingDate - If provided, the function will add `hours` to the provided date instead of the current date. - * @returns {Date} - The new Date object, which is the current date and time plus the input number of hours. - */ -export function addHoursToDate(hours: number, startingDate?: Date): Date { - // When date is passe, let's clone before calling `setHours()` so that we are not mutating the original date - const currentDate = startingDate ? new Date(startingDate) : new Date() - currentDate.setHours(currentDate.getHours() + hours) - return currentDate -} diff --git a/packages/bsky/tests/admin/__snapshots__/get-record.test.ts.snap b/packages/bsky/tests/admin/__snapshots__/get-record.test.ts.snap deleted file mode 100644 index 14a83f9dfda..00000000000 --- a/packages/bsky/tests/admin/__snapshots__/get-record.test.ts.snap +++ /dev/null @@ -1,173 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`admin get record view gets a record by uri and cid. 1`] = ` -Object { - "blobCids": Array [], - "blobs": Array [], - "cid": "cids(0)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(0)", - "val": "self-label", - }, - ], - "moderation": Object { - "subjectStatus": Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedBy": "did:example:admin", - "reviewState": "com.atproto.admin.defs#reviewClosed", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": true, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - }, - "repo": Object { - "did": "user(0)", - "email": "alice@test.com", - "handle": "alice.test", - "indexedAt": "1970-01-01T00:00:00.000Z", - 
"invitesDisabled": false, - "moderation": Object {}, - "relatedRecords": Array [ - Object { - "$type": "app.bsky.actor.profile", - "avatar": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(1)", - }, - "size": 3976, - }, - "description": "its me!", - "displayName": "ali", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label-a", - }, - Object { - "val": "self-label-b", - }, - ], - }, - }, - ], - }, - "uri": "record(0)", - "value": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label", - }, - ], - }, - "text": "hey there", - }, -} -`; - -exports[`admin get record view gets a record by uri, even when taken down. 1`] = ` -Object { - "blobCids": Array [], - "blobs": Array [], - "cid": "cids(0)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(0)", - "val": "self-label", - }, - ], - "moderation": Object { - "subjectStatus": Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedBy": "did:example:admin", - "reviewState": "com.atproto.admin.defs#reviewClosed", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": true, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - }, - "repo": Object { - "did": "user(0)", - "email": "alice@test.com", - "handle": "alice.test", - "indexedAt": "1970-01-01T00:00:00.000Z", - "invitesDisabled": false, - "moderation": Object {}, - "relatedRecords": Array [ - Object { - "$type": 
"app.bsky.actor.profile", - "avatar": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(1)", - }, - "size": 3976, - }, - "description": "its me!", - "displayName": "ali", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label-a", - }, - Object { - "val": "self-label-b", - }, - ], - }, - }, - ], - }, - "uri": "record(0)", - "value": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label", - }, - ], - }, - "text": "hey there", - }, -} -`; diff --git a/packages/bsky/tests/admin/__snapshots__/get-repo.test.ts.snap b/packages/bsky/tests/admin/__snapshots__/get-repo.test.ts.snap deleted file mode 100644 index 4ffd7e3564a..00000000000 --- a/packages/bsky/tests/admin/__snapshots__/get-repo.test.ts.snap +++ /dev/null @@ -1,57 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`admin get repo view gets a repo by did, even when taken down. 
1`] = ` -Object { - "did": "user(0)", - "email": "alice@test.com", - "handle": "alice.test", - "indexedAt": "1970-01-01T00:00:00.000Z", - "invites": Array [], - "invitesDisabled": false, - "labels": Array [], - "moderation": Object { - "subjectStatus": Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedBy": "did:example:admin", - "reviewState": "com.atproto.admin.defs#reviewClosed", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": true, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - }, - "relatedRecords": Array [ - Object { - "$type": "app.bsky.actor.profile", - "avatar": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(0)", - }, - "size": 3976, - }, - "description": "its me!", - "displayName": "ali", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label-a", - }, - Object { - "val": "self-label-b", - }, - ], - }, - }, - ], -} -`; diff --git a/packages/bsky/tests/admin/__snapshots__/moderation-events.test.ts.snap b/packages/bsky/tests/admin/__snapshots__/moderation-events.test.ts.snap deleted file mode 100644 index 8fa16b311f2..00000000000 --- a/packages/bsky/tests/admin/__snapshots__/moderation-events.test.ts.snap +++ /dev/null @@ -1,146 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`moderation-events get event gets an event by specific id 1`] = ` -Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "createdBy": "user(2)", - "event": Object { - "$type": "com.atproto.admin.defs#modEventReport", - "comment": "X", - "reportType": "com.atproto.moderation.defs#reasonMisleading", - }, - "id": 1, - "subject": Object { - "$type": "com.atproto.admin.defs#repoView", - "did": "user(0)", - "handle": 
"alice.test", - "indexedAt": "1970-01-01T00:00:00.000Z", - "moderation": Object { - "subjectStatus": Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedAt": "1970-01-01T00:00:00.000Z", - "lastReviewedBy": "user(1)", - "reviewState": "com.atproto.admin.defs#reviewEscalated", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": false, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - }, - "relatedRecords": Array [ - Object { - "$type": "app.bsky.actor.profile", - "avatar": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(0)", - }, - "size": 3976, - }, - "description": "its me!", - "displayName": "ali", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label-a", - }, - Object { - "val": "self-label-b", - }, - ], - }, - }, - ], - }, - "subjectBlobCids": Array [], - "subjectBlobs": Array [], -} -`; - -exports[`moderation-events query events returns all events for record or repo 1`] = ` -Array [ - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "createdBy": "user(1)", - "creatorHandle": "alice.test", - "event": Object { - "$type": "com.atproto.admin.defs#modEventReport", - "comment": "X", - "reportType": "com.atproto.moderation.defs#reasonSpam", - }, - "id": 7, - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(0)", - }, - "subjectBlobCids": Array [], - "subjectHandle": "bob.test", - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "createdBy": "user(1)", - "creatorHandle": "alice.test", - "event": Object { - "$type": "com.atproto.admin.defs#modEventReport", - "comment": "X", - "reportType": "com.atproto.moderation.defs#reasonSpam", - }, - "id": 3, - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": 
"user(0)", - }, - "subjectBlobCids": Array [], - "subjectHandle": "bob.test", - }, -] -`; - -exports[`moderation-events query events returns all events for record or repo 2`] = ` -Array [ - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "createdBy": "user(0)", - "creatorHandle": "bob.test", - "event": Object { - "$type": "com.atproto.admin.defs#modEventReport", - "comment": "X", - "reportType": "com.atproto.moderation.defs#reasonSpam", - }, - "id": 6, - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - "subjectBlobCids": Array [], - "subjectHandle": "alice.test", - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "createdBy": "user(0)", - "creatorHandle": "bob.test", - "event": Object { - "$type": "com.atproto.admin.defs#modEventReport", - "comment": "X", - "reportType": "com.atproto.moderation.defs#reasonSpam", - }, - "id": 2, - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - "subjectBlobCids": Array [], - "subjectHandle": "alice.test", - }, -] -`; diff --git a/packages/bsky/tests/admin/__snapshots__/moderation-statuses.test.ts.snap b/packages/bsky/tests/admin/__snapshots__/moderation-statuses.test.ts.snap deleted file mode 100644 index a4939733d1a..00000000000 --- a/packages/bsky/tests/admin/__snapshots__/moderation-statuses.test.ts.snap +++ /dev/null @@ -1,64 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`moderation-statuses query statuses returns statuses for subjects that received moderation events 1`] = ` -Array [ - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 4, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "reviewState": "com.atproto.admin.defs#reviewOpen", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "bob.test", - "takendown": false, - "updatedAt": 
"1970-01-01T00:00:00.000Z", - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 3, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "reviewState": "com.atproto.admin.defs#reviewOpen", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(0)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "bob.test", - "takendown": false, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 2, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "reviewState": "com.atproto.admin.defs#reviewOpen", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(1)", - "uri": "record(1)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": false, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "lastReportedAt": "1970-01-01T00:00:00.000Z", - "reviewState": "com.atproto.admin.defs#reviewOpen", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(1)", - }, - "subjectBlobCids": Array [], - "subjectRepoHandle": "alice.test", - "takendown": false, - "updatedAt": "1970-01-01T00:00:00.000Z", - }, -] -`; diff --git a/packages/bsky/tests/admin/__snapshots__/moderation.test.ts.snap b/packages/bsky/tests/admin/__snapshots__/moderation.test.ts.snap deleted file mode 100644 index 33a973e714f..00000000000 --- a/packages/bsky/tests/admin/__snapshots__/moderation.test.ts.snap +++ /dev/null @@ -1,55 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`moderation reporting creates reports of a record. 
1`] = ` -Array [ - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 4, - "reasonType": "com.atproto.moderation.defs#reasonSpam", - "reportedBy": "user(0)", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(0)", - "uri": "record(0)", - }, - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 5, - "reason": "defamation", - "reasonType": "com.atproto.moderation.defs#reasonOther", - "reportedBy": "user(1)", - "subject": Object { - "$type": "com.atproto.repo.strongRef", - "cid": "cids(1)", - "uri": "record(1)", - }, - }, -] -`; - -exports[`moderation reporting creates reports of a repo. 1`] = ` -Array [ - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 1, - "reasonType": "com.atproto.moderation.defs#reasonSpam", - "reportedBy": "user(0)", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(1)", - }, - }, - Object { - "createdAt": "1970-01-01T00:00:00.000Z", - "id": 2, - "reason": "impersonation", - "reasonType": "com.atproto.moderation.defs#reasonOther", - "reportedBy": "user(2)", - "subject": Object { - "$type": "com.atproto.admin.defs#repoRef", - "did": "user(1)", - }, - }, -] -`; diff --git a/packages/bsky/tests/admin/get-record.test.ts b/packages/bsky/tests/admin/get-record.test.ts deleted file mode 100644 index 3807724fa6c..00000000000 --- a/packages/bsky/tests/admin/get-record.test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { SeedClient, TestNetwork } from '@atproto/dev-env' -import AtpAgent from '@atproto/api' -import { AtUri } from '@atproto/syntax' -import { - REASONOTHER, - REASONSPAM, -} from '../../src/lexicon/types/com/atproto/moderation/defs' -import { forSnapshot } from '../_util' -import basicSeed from '../seeds/basic' - -describe('admin get record view', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'views_admin_get_record', - }) - agent = 
network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - beforeAll(async () => { - await sc.emitModerationEvent({ - event: { $type: 'com.atproto.admin.defs#modEventFlag' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - }, - }) - await sc.createReport({ - reportedBy: sc.dids.bob, - reasonType: REASONSPAM, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - }, - }) - await sc.createReport({ - reportedBy: sc.dids.carol, - reasonType: REASONOTHER, - reason: 'defamation', - subject: { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - }, - }) - await sc.emitModerationEvent({ - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - }, - }) - }) - - it('gets a record by uri, even when taken down.', async () => { - const result = await agent.api.com.atproto.admin.getRecord( - { uri: sc.posts[sc.dids.alice][0].ref.uriStr }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(forSnapshot(result.data)).toMatchSnapshot() - }) - - it('gets a record by uri and cid.', async () => { - const result = await agent.api.com.atproto.admin.getRecord( - { - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(forSnapshot(result.data)).toMatchSnapshot() - }) - - it('fails when record does not exist.', async () => { - const promise = agent.api.com.atproto.admin.getRecord( - { - uri: AtUri.make( - sc.dids.alice, - 'app.bsky.feed.post', - 'badrkey', - 
).toString(), - }, - { headers: network.pds.adminAuthHeaders() }, - ) - await expect(promise).rejects.toThrow('Record not found') - }) - - it('fails when record cid does not exist.', async () => { - const promise = agent.api.com.atproto.admin.getRecord( - { - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][1].ref.cidStr, // Mismatching cid - }, - { headers: network.pds.adminAuthHeaders() }, - ) - await expect(promise).rejects.toThrow('Record not found') - }) -}) diff --git a/packages/bsky/tests/admin/get-repo.test.ts b/packages/bsky/tests/admin/get-repo.test.ts deleted file mode 100644 index 1e95f8cc0fc..00000000000 --- a/packages/bsky/tests/admin/get-repo.test.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { SeedClient, TestNetwork } from '@atproto/dev-env' -import AtpAgent from '@atproto/api' -import { - REASONOTHER, - REASONSPAM, -} from '../../src/lexicon/types/com/atproto/moderation/defs' -import { forSnapshot } from '../_util' -import basicSeed from '../seeds/basic' - -describe('admin get repo view', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'views_admin_get_repo', - }) - agent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - beforeAll(async () => { - await sc.emitModerationEvent({ - event: { $type: 'com.atproto.admin.defs#modEventAcknowledge' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - }, - }) - await sc.createReport({ - reportedBy: sc.dids.bob, - reasonType: REASONSPAM, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - }, - }) - await sc.createReport({ - reportedBy: sc.dids.carol, - reasonType: REASONOTHER, - reason: 'defamation', - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - }, - }) - await 
sc.emitModerationEvent({ - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - }, - }) - }) - - it('gets a repo by did, even when taken down.', async () => { - const result = await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.alice }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(forSnapshot(result.data)).toMatchSnapshot() - }) - - it('does not include account emails for triage mods.', async () => { - const { data: admin } = await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.bob }, - { headers: network.pds.adminAuthHeaders() }, - ) - const { data: moderator } = await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.bob }, - { headers: network.pds.adminAuthHeaders('moderator') }, - ) - const { data: triage } = await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.bob }, - { headers: network.pds.adminAuthHeaders('triage') }, - ) - expect(admin.email).toEqual('bob@test.com') - expect(moderator.email).toEqual('bob@test.com') - expect(triage.email).toBeUndefined() - expect(triage).toEqual({ ...admin, email: undefined }) - }) - - it('includes emailConfirmedAt timestamp', async () => { - const { data: beforeEmailVerification } = - await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.bob }, - { headers: network.pds.adminAuthHeaders() }, - ) - - expect(beforeEmailVerification.emailConfirmedAt).toBeUndefined() - const timestampBeforeVerification = Date.now() - const bobsAccount = sc.accounts[sc.dids.bob] - const verificationToken = - await network.pds.ctx.accountManager.createEmailToken( - sc.dids.bob, - 'confirm_email', - ) - await agent.api.com.atproto.server.confirmEmail( - { email: bobsAccount.email, token: verificationToken }, - { - encoding: 'application/json', - - headers: sc.getHeaders(sc.dids.bob), - }, - ) - const { data: afterEmailVerification } = - await agent.api.com.atproto.admin.getRepo( - { did: sc.dids.bob }, - { headers: 
network.pds.adminAuthHeaders() }, - ) - - expect(afterEmailVerification.emailConfirmedAt).toBeTruthy() - expect( - new Date(afterEmailVerification.emailConfirmedAt as string).getTime(), - ).toBeGreaterThan(timestampBeforeVerification) - }) - - it('fails when repo does not exist.', async () => { - const promise = agent.api.com.atproto.admin.getRepo( - { did: 'did:plc:doesnotexist' }, - { headers: network.pds.adminAuthHeaders() }, - ) - await expect(promise).rejects.toThrow('Repo not found') - }) -}) diff --git a/packages/bsky/tests/admin/moderation-events.test.ts b/packages/bsky/tests/admin/moderation-events.test.ts deleted file mode 100644 index 174167034db..00000000000 --- a/packages/bsky/tests/admin/moderation-events.test.ts +++ /dev/null @@ -1,221 +0,0 @@ -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import AtpAgent, { ComAtprotoAdminDefs } from '@atproto/api' -import { forSnapshot } from '../_util' -import basicSeed from '../seeds/basic' -import { - REASONMISLEADING, - REASONSPAM, -} from '../../src/lexicon/types/com/atproto/moderation/defs' - -describe('moderation-events', () => { - let network: TestNetwork - let agent: AtpAgent - let pdsAgent: AtpAgent - let sc: SeedClient - - const emitModerationEvent = async (eventData) => { - return pdsAgent.api.com.atproto.admin.emitModerationEvent(eventData, { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('moderator'), - }) - } - - const queryModerationEvents = (eventQuery) => - agent.api.com.atproto.admin.queryModerationEvents(eventQuery, { - headers: network.bsky.adminAuthHeaders('moderator'), - }) - - const seedEvents = async () => { - const bobsAccount = { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - } - const alicesAccount = { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - } - const bobsPost = { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.bob][0].ref.uriStr, - cid: sc.posts[sc.dids.bob][0].ref.cidStr, - } - const 
alicesPost = { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][0].ref.uriStr, - cid: sc.posts[sc.dids.alice][0].ref.cidStr, - } - - for (let i = 0; i < 4; i++) { - await emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventReport', - reportType: i % 2 ? REASONSPAM : REASONMISLEADING, - comment: 'X', - }, - // Report bob's account by alice and vice versa - subject: i % 2 ? bobsAccount : alicesAccount, - createdBy: i % 2 ? sc.dids.alice : sc.dids.bob, - }) - await emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventReport', - reportType: REASONSPAM, - comment: 'X', - }, - // Report bob's post by alice and vice versa - subject: i % 2 ? bobsPost : alicesPost, - createdBy: i % 2 ? sc.dids.alice : sc.dids.bob, - }) - } - } - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_moderation_events', - }) - agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - await seedEvents() - }) - - afterAll(async () => { - await network.close() - }) - - describe('query events', () => { - it('returns all events for record or repo', async () => { - const [bobsEvents, alicesPostEvents] = await Promise.all([ - queryModerationEvents({ - subject: sc.dids.bob, - }), - queryModerationEvents({ - subject: sc.posts[sc.dids.alice][0].ref.uriStr, - }), - ]) - - expect(forSnapshot(bobsEvents.data.events)).toMatchSnapshot() - expect(forSnapshot(alicesPostEvents.data.events)).toMatchSnapshot() - }) - - it('filters events by types', async () => { - const alicesAccount = { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - } - await Promise.all([ - emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventComment', - comment: 'X', - }, - subject: alicesAccount, - createdBy: 'did:plc:moderator', - }), - emitModerationEvent({ - event: { - $type: 
'com.atproto.admin.defs#modEventEscalate', - comment: 'X', - }, - subject: alicesAccount, - createdBy: 'did:plc:moderator', - }), - ]) - const [allEvents, reportEvents] = await Promise.all([ - queryModerationEvents({ - subject: sc.dids.alice, - }), - queryModerationEvents({ - subject: sc.dids.alice, - types: ['com.atproto.admin.defs#modEventReport'], - }), - ]) - - expect(allEvents.data.events.length).toBeGreaterThan( - reportEvents.data.events.length, - ) - expect( - [...new Set(reportEvents.data.events.map((e) => e.event.$type))].length, - ).toEqual(1) - - expect( - [...new Set(allEvents.data.events.map((e) => e.event.$type))].length, - ).toEqual(3) - }) - - it('returns events for all content by user', async () => { - const [forAccount, forPost] = await Promise.all([ - queryModerationEvents({ - subject: sc.dids.bob, - includeAllUserRecords: true, - }), - queryModerationEvents({ - subject: sc.posts[sc.dids.bob][0].ref.uriStr, - includeAllUserRecords: true, - }), - ]) - - expect(forAccount.data.events.length).toEqual(forPost.data.events.length) - // Save events are returned from both requests - expect(forPost.data.events.map(({ id }) => id).sort()).toEqual( - forAccount.data.events.map(({ id }) => id).sort(), - ) - }) - - it('returns paginated list of events with cursor', async () => { - const allEvents = await queryModerationEvents({ - subject: sc.dids.bob, - includeAllUserRecords: true, - }) - - const getPaginatedEvents = async ( - sortDirection: 'asc' | 'desc' = 'desc', - ) => { - let defaultCursor: undefined | string = undefined - const events: ComAtprotoAdminDefs.ModEventView[] = [] - let count = 0 - do { - // get 1 event at a time and check we get all events - const { data } = await queryModerationEvents({ - limit: 1, - subject: sc.dids.bob, - includeAllUserRecords: true, - cursor: defaultCursor, - sortDirection, - }) - events.push(...data.events) - defaultCursor = data.cursor - count++ - // The count is a circuit breaker to prevent infinite loop in case of 
failing test - } while (defaultCursor && count < 10) - - return events - } - - const defaultEvents = await getPaginatedEvents() - const reversedEvents = await getPaginatedEvents('asc') - - expect(allEvents.data.events.length).toEqual(4) - expect(defaultEvents.length).toEqual(allEvents.data.events.length) - expect(reversedEvents.length).toEqual(allEvents.data.events.length) - expect(reversedEvents[0].id).toEqual(defaultEvents[3].id) - }) - }) - - describe('get event', () => { - it('gets an event by specific id', async () => { - const { data } = await pdsAgent.api.com.atproto.admin.getModerationEvent( - { - id: 1, - }, - { - headers: network.bsky.adminAuthHeaders('moderator'), - }, - ) - - expect(forSnapshot(data)).toMatchSnapshot() - }) - }) -}) diff --git a/packages/bsky/tests/admin/moderation-statuses.test.ts b/packages/bsky/tests/admin/moderation-statuses.test.ts deleted file mode 100644 index 5109cc43b0e..00000000000 --- a/packages/bsky/tests/admin/moderation-statuses.test.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import AtpAgent, { - ComAtprotoAdminDefs, - ComAtprotoAdminQueryModerationStatuses, -} from '@atproto/api' -import { forSnapshot } from '../_util' -import basicSeed from '../seeds/basic' -import { - REASONMISLEADING, - REASONSPAM, -} from '../../src/lexicon/types/com/atproto/moderation/defs' - -describe('moderation-statuses', () => { - let network: TestNetwork - let agent: AtpAgent - let pdsAgent: AtpAgent - let sc: SeedClient - - const emitModerationEvent = async (eventData) => { - return pdsAgent.api.com.atproto.admin.emitModerationEvent(eventData, { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('moderator'), - }) - } - - const queryModerationStatuses = (statusQuery) => - agent.api.com.atproto.admin.queryModerationStatuses(statusQuery, { - headers: network.bsky.adminAuthHeaders('moderator'), - }) - - const seedEvents = async () => { - const bobsAccount = { - $type: 
'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - } - const carlasAccount = { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.alice, - } - const bobsPost = { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.bob][1].ref.uriStr, - cid: sc.posts[sc.dids.bob][1].ref.cidStr, - } - const alicesPost = { - $type: 'com.atproto.repo.strongRef', - uri: sc.posts[sc.dids.alice][1].ref.uriStr, - cid: sc.posts[sc.dids.alice][1].ref.cidStr, - } - - for (let i = 0; i < 4; i++) { - await emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventReport', - reportType: i % 2 ? REASONSPAM : REASONMISLEADING, - comment: 'X', - }, - // Report bob's account by alice and vice versa - subject: i % 2 ? bobsAccount : carlasAccount, - createdBy: i % 2 ? sc.dids.alice : sc.dids.bob, - }) - await emitModerationEvent({ - event: { - $type: 'com.atproto.admin.defs#modEventReport', - reportType: REASONSPAM, - comment: 'X', - }, - // Report bob's post by alice and vice versa - subject: i % 2 ? bobsPost : alicesPost, - createdBy: i % 2 ? 
sc.dids.alice : sc.dids.bob, - }) - } - } - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_moderation_statuses', - }) - agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - await seedEvents() - }) - - afterAll(async () => { - await network.close() - }) - - describe('query statuses', () => { - it('returns statuses for subjects that received moderation events', async () => { - const response = await queryModerationStatuses({}) - - expect(forSnapshot(response.data.subjectStatuses)).toMatchSnapshot() - }) - - it('returns paginated statuses', async () => { - // We know there will be exactly 4 statuses in db - const getPaginatedStatuses = async ( - params: ComAtprotoAdminQueryModerationStatuses.QueryParams, - ) => { - let cursor: string | undefined = '' - const statuses: ComAtprotoAdminDefs.SubjectStatusView[] = [] - let count = 0 - do { - const results = await queryModerationStatuses({ - limit: 1, - cursor, - ...params, - }) - cursor = results.data.cursor - statuses.push(...results.data.subjectStatuses) - count++ - // The count is just a brake-check to prevent infinite loop - } while (cursor && count < 10) - - return statuses - } - - const list = await getPaginatedStatuses({}) - expect(list[0].id).toEqual(4) - expect(list[list.length - 1].id).toEqual(1) - - await emitModerationEvent({ - subject: list[1].subject, - event: { - $type: 'com.atproto.admin.defs#modEventAcknowledge', - comment: 'X', - }, - createdBy: sc.dids.bob, - }) - - const listReviewedFirst = await getPaginatedStatuses({ - sortDirection: 'desc', - sortField: 'lastReviewedAt', - }) - - // Verify that the item that was recently reviewed comes up first when sorted descendingly - // while the result set always contains same number of items regardless of sorting - expect(listReviewedFirst[0].id).toEqual(list[1].id) - 
expect(listReviewedFirst.length).toEqual(list.length) - }) - }) -}) diff --git a/packages/bsky/tests/admin/moderation.test.ts b/packages/bsky/tests/admin/moderation.test.ts deleted file mode 100644 index 5f7fea32c3a..00000000000 --- a/packages/bsky/tests/admin/moderation.test.ts +++ /dev/null @@ -1,939 +0,0 @@ -import { TestNetwork, ImageRef, RecordRef, SeedClient } from '@atproto/dev-env' -import AtpAgent, { - ComAtprotoAdminEmitModerationEvent, - ComAtprotoAdminQueryModerationStatuses, - ComAtprotoModerationCreateReport, -} from '@atproto/api' -import { AtUri } from '@atproto/syntax' -import { forSnapshot } from '../_util' -import basicSeed from '../seeds/basic' -import { - REASONMISLEADING, - REASONOTHER, - REASONSPAM, -} from '../../src/lexicon/types/com/atproto/moderation/defs' -import { - ModEventLabel, - ModEventTakedown, - REVIEWCLOSED, - REVIEWESCALATED, -} from '../../src/lexicon/types/com/atproto/admin/defs' -import { PeriodicModerationEventReversal } from '../../src' - -type BaseCreateReportParams = - | { account: string } - | { content: { uri: string; cid: string } } -type CreateReportParams = BaseCreateReportParams & { - author: string -} & Omit - -type TakedownParams = BaseCreateReportParams & - Omit - -describe('moderation', () => { - let network: TestNetwork - let agent: AtpAgent - let pdsAgent: AtpAgent - let sc: SeedClient - - const createReport = async (params: CreateReportParams) => { - const { author, ...rest } = params - return agent.api.com.atproto.moderation.createReport( - { - // Set default type to spam - reasonType: REASONSPAM, - ...rest, - subject: - 'account' in params - ? 
{ - $type: 'com.atproto.admin.defs#repoRef', - did: params.account, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: params.content.uri, - cid: params.content.cid, - }, - }, - { - headers: await network.serviceHeaders(author), - encoding: 'application/json', - }, - ) - } - - const performTakedown = async ({ - durationInHours, - ...rest - }: TakedownParams & Pick) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventTakedown', - durationInHours, - }, - subject: - 'account' in rest - ? { - $type: 'com.atproto.admin.defs#repoRef', - did: rest.account, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: rest.content.uri, - cid: rest.content.cid, - }, - createdBy: 'did:example:admin', - ...rest, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders(), - }, - ) - - const performReverseTakedown = async (params: TakedownParams) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventReverseTakedown', - }, - subject: - 'account' in params - ? 
{ - $type: 'com.atproto.admin.defs#repoRef', - did: params.account, - } - : { - $type: 'com.atproto.repo.strongRef', - uri: params.content.uri, - cid: params.content.cid, - }, - createdBy: 'did:example:admin', - ...params, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders(), - }, - ) - - const getStatuses = async ( - params: ComAtprotoAdminQueryModerationStatuses.QueryParams, - ) => { - const { data } = await agent.api.com.atproto.admin.queryModerationStatuses( - params, - { headers: network.bsky.adminAuthHeaders() }, - ) - - return data - } - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_moderation', - }) - agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - describe('reporting', () => { - it('creates reports of a repo.', async () => { - const { data: reportA } = await createReport({ - reasonType: REASONSPAM, - account: sc.dids.bob, - author: sc.dids.alice, - }) - const { data: reportB } = await createReport({ - reasonType: REASONOTHER, - reason: 'impersonation', - account: sc.dids.bob, - author: sc.dids.carol, - }) - expect(forSnapshot([reportA, reportB])).toMatchSnapshot() - }) - - it("allows reporting a repo that doesn't exist.", async () => { - const promise = createReport({ - reasonType: REASONSPAM, - account: 'did:plc:unknown', - author: sc.dids.alice, - }) - await expect(promise).resolves.toBeDefined() - }) - - it('creates reports of a record.', async () => { - const postA = sc.posts[sc.dids.bob][0].ref - const postB = sc.posts[sc.dids.bob][1].ref - const { data: reportA } = await createReport({ - author: sc.dids.alice, - reasonType: REASONSPAM, - content: { - $type: 'com.atproto.repo.strongRef', - uri: postA.uriStr, - cid: postA.cidStr, - }, - }) - const { data: reportB } = await createReport({ - reasonType: 
REASONOTHER, - reason: 'defamation', - content: { - $type: 'com.atproto.repo.strongRef', - uri: postB.uriStr, - cid: postB.cidStr, - }, - author: sc.dids.carol, - }) - expect(forSnapshot([reportA, reportB])).toMatchSnapshot() - }) - - it("allows reporting a record that doesn't exist.", async () => { - const postA = sc.posts[sc.dids.bob][0].ref - const postB = sc.posts[sc.dids.bob][1].ref - const postUriBad = new AtUri(postA.uriStr) - postUriBad.rkey = 'badrkey' - - const promiseA = createReport({ - reasonType: REASONSPAM, - content: { - $type: 'com.atproto.repo.strongRef', - uri: postUriBad.toString(), - cid: postA.cidStr, - }, - author: sc.dids.alice, - }) - await expect(promiseA).resolves.toBeDefined() - - const promiseB = createReport({ - reasonType: REASONOTHER, - reason: 'defamation', - content: { - $type: 'com.atproto.repo.strongRef', - uri: postB.uri.toString(), - cid: postA.cidStr, // bad cid - }, - author: sc.dids.carol, - }) - await expect(promiseB).resolves.toBeDefined() - }) - }) - - describe('actioning', () => { - it('resolves reports on repos and records.', async () => { - const post = sc.posts[sc.dids.bob][1].ref - - await Promise.all([ - createReport({ - reasonType: REASONSPAM, - account: sc.dids.bob, - author: sc.dids.alice, - }), - createReport({ - reasonType: REASONOTHER, - reason: 'defamation', - content: { - uri: post.uri.toString(), - cid: post.cid.toString(), - }, - author: sc.dids.carol, - }), - ]) - - await performTakedown({ - account: sc.dids.bob, - }) - - const moderationStatusOnBobsAccount = await getStatuses({ - subject: sc.dids.bob, - }) - - // Validate that subject status is set to review closed and takendown flag is on - expect(moderationStatusOnBobsAccount.subjectStatuses[0]).toMatchObject({ - reviewState: REVIEWCLOSED, - takendown: true, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - - // Cleanup - await performReverseTakedown({ - account: sc.dids.bob, - }) - }) - - it('supports escalating a 
subject', async () => { - const alicesPostRef = sc.posts[sc.dids.alice][0].ref - const alicesPostSubject = { - $type: 'com.atproto.repo.strongRef', - uri: alicesPostRef.uri.toString(), - cid: alicesPostRef.cid.toString(), - } - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventEscalate', - comment: 'Y', - }, - subject: alicesPostSubject, - createdBy: 'did:example:admin', - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('triage'), - }, - ) - - const alicesPostStatus = await getStatuses({ - subject: alicesPostRef.uri.toString(), - }) - - expect(alicesPostStatus.subjectStatuses[0]).toMatchObject({ - reviewState: REVIEWESCALATED, - takendown: false, - subject: alicesPostSubject, - }) - }) - - it('adds persistent comment on subject through comment event', async () => { - const alicesPostRef = sc.posts[sc.dids.alice][0].ref - const alicesPostSubject = { - $type: 'com.atproto.repo.strongRef', - uri: alicesPostRef.uri.toString(), - cid: alicesPostRef.cid.toString(), - } - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventComment', - sticky: true, - comment: 'This is a persistent note', - }, - subject: alicesPostSubject, - createdBy: 'did:example:admin', - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('triage'), - }, - ) - - const alicesPostStatus = await getStatuses({ - subject: alicesPostRef.uri.toString(), - }) - - expect(alicesPostStatus.subjectStatuses[0].comment).toEqual( - 'This is a persistent note', - ) - }) - - it('reverses status when revert event is triggered.', async () => { - const alicesPostRef = sc.posts[sc.dids.alice][0].ref - const emitModEvent = async ( - event: ComAtprotoAdminEmitModerationEvent.InputSchema['event'], - overwrites: Partial = {}, - ) => { - const baseAction = { - subject: { - $type: 'com.atproto.repo.strongRef', - uri: alicesPostRef.uriStr, - cid: 
alicesPostRef.cidStr, - }, - createdBy: 'did:example:admin', - } - return agent.api.com.atproto.admin.emitModerationEvent( - { - event, - ...baseAction, - ...overwrites, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders(), - }, - ) - } - // Validate that subject status is marked as escalated - await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventReport', - reportType: REASONSPAM, - }) - await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventReport', - reportType: REASONMISLEADING, - }) - await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventEscalate', - }) - const alicesPostStatusAfterEscalation = await getStatuses({ - subject: alicesPostRef.uriStr, - }) - expect( - alicesPostStatusAfterEscalation.subjectStatuses[0].reviewState, - ).toEqual(REVIEWESCALATED) - - // Validate that subject status is marked as takendown - - await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventLabel', - createLabelVals: ['nsfw'], - negateLabelVals: [], - }) - const { data: takedownAction } = await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventTakedown', - }) - - const alicesPostStatusAfterTakedown = await getStatuses({ - subject: alicesPostRef.uriStr, - }) - expect(alicesPostStatusAfterTakedown.subjectStatuses[0]).toMatchObject({ - reviewState: REVIEWCLOSED, - takendown: true, - }) - - await emitModEvent({ - $type: 'com.atproto.admin.defs#modEventReverseTakedown', - }) - const alicesPostStatusAfterRevert = await getStatuses({ - subject: alicesPostRef.uriStr, - }) - // Validate that after reverting, the status of the subject is reverted to the last status changing event - expect(alicesPostStatusAfterRevert.subjectStatuses[0]).toMatchObject({ - reviewState: REVIEWCLOSED, - takendown: false, - }) - // Validate that after reverting, the last review date of the subject - // DOES NOT update to the the last status changing event - expect( - new Date( - alicesPostStatusAfterEscalation.subjectStatuses[0] - .lastReviewedAt 
as string, - ) < - new Date( - alicesPostStatusAfterRevert.subjectStatuses[0] - .lastReviewedAt as string, - ), - ).toBeTruthy() - }) - - it('negates an existing label.', async () => { - const { ctx } = network.bsky - const post = sc.posts[sc.dids.bob][0].ref - const bobsPostSubject = { - $type: 'com.atproto.repo.strongRef', - uri: post.uriStr, - cid: post.cidStr, - } - const labelingService = ctx.services.label(ctx.db.getPrimary()) - await labelingService.formatAndCreate( - ctx.cfg.labelerDid, - post.uriStr, - post.cidStr, - { create: ['kittens'] }, - ) - await emitLabelEvent({ - negateLabelVals: ['kittens'], - createLabelVals: [], - subject: bobsPostSubject, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual([]) - - await emitLabelEvent({ - createLabelVals: ['kittens'], - negateLabelVals: [], - subject: bobsPostSubject, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual(['kittens']) - // Cleanup - await labelingService.formatAndCreate( - ctx.cfg.labelerDid, - post.uriStr, - post.cidStr, - { negate: ['kittens'] }, - ) - }) - - it('no-ops when negating an already-negated label and reverses.', async () => { - const { ctx } = network.bsky - const post = sc.posts[sc.dids.bob][0].ref - const labelingService = ctx.services.label(ctx.db.getPrimary()) - await emitLabelEvent({ - negateLabelVals: ['bears'], - createLabelVals: [], - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uriStr, - cid: post.cidStr, - }, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual([]) - await emitLabelEvent({ - createLabelVals: ['bears'], - negateLabelVals: [], - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uriStr, - cid: post.cidStr, - }, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual(['bears']) - // Cleanup - await labelingService.formatAndCreate( - ctx.cfg.labelerDid, - post.uriStr, - post.cidStr, - { negate: ['bears'] }, - ) - }) - - it('creates non-existing labels and reverses.', async () => 
{ - const post = sc.posts[sc.dids.bob][0].ref - await emitLabelEvent({ - createLabelVals: ['puppies', 'doggies'], - negateLabelVals: [], - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uriStr, - cid: post.cidStr, - }, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual([ - 'puppies', - 'doggies', - ]) - await emitLabelEvent({ - negateLabelVals: ['puppies', 'doggies'], - createLabelVals: [], - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uriStr, - cid: post.cidStr, - }, - }) - await expect(getRecordLabels(post.uriStr)).resolves.toEqual([]) - }) - - it('creates labels on a repo and reverses.', async () => { - await emitLabelEvent({ - createLabelVals: ['puppies', 'doggies'], - negateLabelVals: [], - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - await expect(getRepoLabels(sc.dids.bob)).resolves.toEqual([ - 'puppies', - 'doggies', - ]) - await emitLabelEvent({ - negateLabelVals: ['puppies', 'doggies'], - createLabelVals: [], - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - await expect(getRepoLabels(sc.dids.bob)).resolves.toEqual([]) - }) - - it('creates and negates labels on a repo and reverses.', async () => { - const { ctx } = network.bsky - const labelingService = ctx.services.label(ctx.db.getPrimary()) - await labelingService.formatAndCreate( - ctx.cfg.labelerDid, - sc.dids.bob, - null, - { create: ['kittens'] }, - ) - await emitLabelEvent({ - createLabelVals: ['puppies'], - negateLabelVals: ['kittens'], - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - await expect(getRepoLabels(sc.dids.bob)).resolves.toEqual(['puppies']) - - await emitLabelEvent({ - negateLabelVals: ['puppies'], - createLabelVals: ['kittens'], - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - await expect(getRepoLabels(sc.dids.bob)).resolves.toEqual(['kittens']) - }) - - it('does not allow triage 
moderators to label.', async () => { - const attemptLabel = agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventLabel', - negateLabelVals: ['a'], - createLabelVals: ['b', 'c'], - }, - createdBy: 'did:example:moderator', - reason: 'Y', - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('triage'), - }, - ) - await expect(attemptLabel).rejects.toThrow( - 'Must be a full moderator to label content', - ) - }) - - it('does not allow take down event on takendown post or reverse takedown on available post.', async () => { - await performTakedown({ - account: sc.dids.bob, - }) - await expect( - performTakedown({ - account: sc.dids.bob, - }), - ).rejects.toThrow('Subject is already taken down') - - // Cleanup - await performReverseTakedown({ - account: sc.dids.bob, - }) - await expect( - performReverseTakedown({ - account: sc.dids.bob, - }), - ).rejects.toThrow('Subject is not taken down') - }) - it('fans out repo takedowns to pds', async () => { - await performTakedown({ - account: sc.dids.bob, - }) - - const res1 = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { - did: sc.dids.bob, - }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(res1.data.takedown?.applied).toBe(true) - - // cleanup - await performReverseTakedown({ account: sc.dids.bob }) - - const res2 = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { - did: sc.dids.bob, - }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(res2.data.takedown?.applied).toBe(false) - }) - - it('fans out record takedowns to pds', async () => { - const post = sc.posts[sc.dids.bob][0] - const uri = post.ref.uriStr - const cid = post.ref.cidStr - await performTakedown({ - content: { uri, cid }, - }) - const res1 = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { uri }, - { headers: network.pds.adminAuthHeaders() }, - ) - 
expect(res1.data.takedown?.applied).toBe(true) - - // cleanup - await performReverseTakedown({ content: { uri, cid } }) - - const res2 = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { uri }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(res2.data.takedown?.applied).toBe(false) - }) - - it('allows full moderators to takedown.', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventTakedown', - }, - createdBy: 'did:example:moderator', - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('moderator'), - }, - ) - // cleanup - await reverse({ - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }) - }) - - it('does not allow non-full moderators to takedown.', async () => { - const attemptTakedownTriage = - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventTakedown', - }, - createdBy: 'did:example:moderator', - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.bob, - }, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders('triage'), - }, - ) - await expect(attemptTakedownTriage).rejects.toThrow( - 'Must be a full moderator to perform an account takedown', - ) - }) - it('automatically reverses actions marked with duration', async () => { - await createReport({ - reasonType: REASONSPAM, - account: sc.dids.bob, - author: sc.dids.alice, - }) - const { data: action } = await performTakedown({ - account: sc.dids.bob, - // Use negative value to set the expiry time in the past so that the action is automatically reversed - // right away without having to wait n number of hours for a successful assertion - durationInHours: -1, - }) - - const { data: statusesAfterTakedown } = - await agent.api.com.atproto.admin.queryModerationStatuses( - { 
subject: sc.dids.bob }, - { headers: network.bsky.adminAuthHeaders('moderator') }, - ) - - expect(statusesAfterTakedown.subjectStatuses[0]).toMatchObject({ - takendown: true, - }) - - // In the actual app, this will be instantiated and run on server startup - const periodicReversal = new PeriodicModerationEventReversal( - network.bsky.ctx, - ) - await periodicReversal.findAndRevertDueActions() - - const [{ data: eventList }, { data: statuses }] = await Promise.all([ - agent.api.com.atproto.admin.queryModerationEvents( - { subject: sc.dids.bob }, - { headers: network.bsky.adminAuthHeaders('moderator') }, - ), - agent.api.com.atproto.admin.queryModerationStatuses( - { subject: sc.dids.bob }, - { headers: network.bsky.adminAuthHeaders('moderator') }, - ), - ]) - - expect(statuses.subjectStatuses[0]).toMatchObject({ - takendown: false, - reviewState: REVIEWCLOSED, - }) - // Verify that the automatic reversal is attributed to the original moderator of the temporary action - // and that the reason is set to indicate that the action was automatically reversed. 
- expect(eventList.events[0]).toMatchObject({ - createdBy: action.createdBy, - event: { - $type: 'com.atproto.admin.defs#modEventReverseTakedown', - comment: - '[SCHEDULED_REVERSAL] Reverting action as originally scheduled', - }, - }) - }) - - async function emitLabelEvent( - opts: Partial & { - subject: ComAtprotoAdminEmitModerationEvent.InputSchema['subject'] - createLabelVals: ModEventLabel['createLabelVals'] - negateLabelVals: ModEventLabel['negateLabelVals'] - }, - ) { - const { createLabelVals, negateLabelVals, ...rest } = opts - const result = await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventLabel', - createLabelVals, - negateLabelVals, - }, - createdBy: 'did:example:admin', - reason: 'Y', - ...opts, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders(), - }, - ) - return result.data - } - - async function reverse( - opts: Partial & { - subject: ComAtprotoAdminEmitModerationEvent.InputSchema['subject'] - }, - ) { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventReverseTakedown', - }, - createdBy: 'did:example:admin', - reason: 'Y', - ...opts, - }, - { - encoding: 'application/json', - headers: network.bsky.adminAuthHeaders(), - }, - ) - } - - async function getRecordLabels(uri: string) { - const result = await agent.api.com.atproto.admin.getRecord( - { uri }, - { headers: network.bsky.adminAuthHeaders() }, - ) - const labels = result.data.labels ?? [] - return labels.map((l) => l.val) - } - - async function getRepoLabels(did: string) { - const result = await agent.api.com.atproto.admin.getRepo( - { did }, - { headers: network.bsky.adminAuthHeaders() }, - ) - const labels = result.data.labels ?? 
[] - return labels.map((l) => l.val) - } - }) - - describe('blob takedown', () => { - let post: { ref: RecordRef; images: ImageRef[] } - let blob: ImageRef - let imageUri: string - beforeAll(async () => { - const { ctx } = network.bsky - post = sc.posts[sc.dids.carol][0] - blob = post.images[1] - imageUri = ctx.imgUriBuilder - .getPresetUri( - 'feed_thumbnail', - sc.dids.carol, - blob.image.ref.toString(), - ) - .replace(ctx.cfg.publicUrl || '', network.bsky.url) - // Warm image server cache - await fetch(imageUri) - const cached = await fetch(imageUri) - expect(cached.headers.get('x-cache')).toEqual('hit') - await performTakedown({ - content: { - uri: post.ref.uriStr, - cid: post.ref.cidStr, - }, - subjectBlobCids: [blob.image.ref.toString()], - }) - }) - - it('sets blobCids in moderation status', async () => { - const { subjectStatuses } = await getStatuses({ - subject: post.ref.uriStr, - }) - - expect(subjectStatuses[0].subjectBlobCids).toEqual([ - blob.image.ref.toString(), - ]) - }) - - it('prevents resolution of blob', async () => { - const blobPath = `/blob/${sc.dids.carol}/${blob.image.ref.toString()}` - const resolveBlob = await fetch(`${network.bsky.url}${blobPath}`) - expect(resolveBlob.status).toEqual(404) - expect(await resolveBlob.json()).toEqual({ - error: 'NotFoundError', - message: 'Blob not found', - }) - }) - - it('prevents image blob from being served, even when cached.', async () => { - const fetchImage = await fetch(imageUri) - expect(fetchImage.status).toEqual(404) - expect(await fetchImage.json()).toEqual({ message: 'Image not found' }) - }) - - it('fans takedown out to pds', async () => { - const res = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { - did: sc.dids.carol, - blob: blob.image.ref.toString(), - }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(res.data.takedown?.applied).toBe(true) - }) - - it('restores blob when action is reversed.', async () => { - await performReverseTakedown({ - content: { - uri: 
post.ref.uriStr, - cid: post.ref.cidStr, - }, - subjectBlobCids: [blob.image.ref.toString()], - }) - - // Can resolve blob - const blobPath = `/blob/${sc.dids.carol}/${blob.image.ref.toString()}` - const resolveBlob = await fetch(`${network.bsky.url}${blobPath}`) - expect(resolveBlob.status).toEqual(200) - - // Can fetch through image server - const fetchImage = await fetch(imageUri) - expect(fetchImage.status).toEqual(200) - const size = Number(fetchImage.headers.get('content-length')) - expect(size).toBeGreaterThan(9000) - }) - - it('fans reversal out to pds', async () => { - const res = await pdsAgent.api.com.atproto.admin.getSubjectStatus( - { - did: sc.dids.carol, - blob: blob.image.ref.toString(), - }, - { headers: network.pds.adminAuthHeaders() }, - ) - expect(res.data.takedown?.applied).toBe(false) - }) - }) -}) diff --git a/packages/bsky/tests/admin/repo-search.test.ts b/packages/bsky/tests/admin/repo-search.test.ts deleted file mode 100644 index 9e643ba12e0..00000000000 --- a/packages/bsky/tests/admin/repo-search.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { SeedClient, TestNetwork } from '@atproto/dev-env' -import AtpAgent from '@atproto/api' -import { paginateAll } from '../_util' -import usersBulkSeed from '../seeds/users-bulk' - -describe('admin repo search view', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - let headers: { [s: string]: string } - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'views_admin_repo_search', - }) - agent = network.pds.getClient() - sc = network.getSeedClient() - await usersBulkSeed(sc) - headers = network.pds.adminAuthHeaders() - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - beforeAll(async () => { - await sc.emitModerationEvent({ - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids['cara-wiegand69.test'], - }, - }) - }) - 
- it('gives relevant results', async () => { - const result = await agent.api.com.atproto.admin.searchRepos( - { term: 'car' }, - { headers }, - ) - - const handles = result.data.repos.map((u) => u.handle) - - const shouldContain = [ - 'cara-wiegand69.test', // Present despite repo takedown - 'carlos6.test', - 'carolina-mcdermott77.test', - ] - - shouldContain.forEach((handle) => expect(handles).toContain(handle)) - - const shouldNotContain = [ - 'sven70.test', - 'hilario84.test', - 'santa-hermann78.test', - 'dylan61.test', - 'preston-harris.test', - 'loyce95.test', - 'melyna-zboncak.test', - ] - - shouldNotContain.forEach((handle) => expect(handles).not.toContain(handle)) - }) - - it('finds repo by did', async () => { - const term = sc.dids['cara-wiegand69.test'] - const res = await agent.api.com.atproto.admin.searchRepos( - { term }, - { headers }, - ) - - expect(res.data.repos.length).toEqual(1) - expect(res.data.repos[0].did).toEqual(term) - }) - - it('paginates with term', async () => { - const results = (results) => results.flatMap((res) => res.users) - const paginator = async (cursor?: string) => { - const res = await agent.api.com.atproto.admin.searchRepos( - { term: 'p', cursor, limit: 3 }, - { headers }, - ) - return res.data - } - - const paginatedAll = await paginateAll(paginator) - paginatedAll.forEach((res) => - expect(res.repos.length).toBeLessThanOrEqual(3), - ) - - const full = await agent.api.com.atproto.admin.searchRepos( - { term: 'p' }, - { headers }, - ) - - expect(full.data.repos.length).toBeGreaterThan(3) - expect(results(paginatedAll)).toEqual(results([full.data])) - }) - - it('paginates without term', async () => { - const results = (results) => results.flatMap((res) => res.repos) - const paginator = async (cursor?: string) => { - const res = await agent.api.com.atproto.admin.searchRepos( - { cursor, limit: 3 }, - { headers }, - ) - return res.data - } - - const paginatedAll = await paginateAll(paginator, 5) - paginatedAll.forEach((res) => 
- expect(res.repos.length).toBeLessThanOrEqual(3), - ) - - const full = await agent.api.com.atproto.admin.searchRepos( - { limit: 15 }, - { headers }, - ) - - expect(full.data.repos.length).toEqual(15) - expect(results(paginatedAll)).toEqual(results([full.data])) - }) -}) diff --git a/packages/bsky/tests/algos/hot-classic.test.ts b/packages/bsky/tests/algos/hot-classic.test.ts deleted file mode 100644 index bb44ca5c0e8..00000000000 --- a/packages/bsky/tests/algos/hot-classic.test.ts +++ /dev/null @@ -1,88 +0,0 @@ -import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import basicSeed from '../seeds/basic' -import { makeAlgos } from '../../src' - -describe('algo hot-classic', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - - // account dids, for convenience - let alice: string - let bob: string - - const feedPublisherDid = 'did:example:feed-publisher' - const feedUri = AtUri.make( - feedPublisherDid, - 'app.bsky.feed.generator', - 'hot-classic', - ).toString() - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_algo_hot_classic', - bsky: { algos: makeAlgos(feedPublisherDid) }, - }) - agent = new AtpAgent({ service: network.bsky.url }) - sc = network.getSeedClient() - await basicSeed(sc) - - alice = sc.dids.alice - bob = sc.dids.bob - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - it('returns well liked posts', async () => { - const img = await sc.uploadFile( - alice, - 'tests/sample-img/key-landscape-small.jpg', - 'image/jpeg', - ) - const one = await sc.post(alice, 'first post', undefined, [img]) - const two = await sc.post(bob, 'bobby boi') - const three = await sc.reply(bob, one.ref, one.ref, 'reply') - - for (let i = 0; i < 12; i++) { - const name = `user${i}` - await sc.createAccount(name, { - handle: `user${i}.test`, - email: `user${i}@test.com`, - password: 'password', - }) - await 
sc.like(sc.dids[name], one.ref) - await sc.like(sc.dids[name], two.ref) - await sc.like(sc.dids[name], three.ref) - } - await network.processAll() - - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - const feedUris = res.data.feed.map((i) => i.post.uri).sort() - const expected = [one.ref.uriStr, two.ref.uriStr].sort() - expect(feedUris).toEqual(expected) - }) - - it('paginates', async () => { - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - const first = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, limit: 1 }, - { headers: await network.serviceHeaders(alice) }, - ) - const second = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, cursor: first.data.cursor }, - { headers: await network.serviceHeaders(alice) }, - ) - - expect([...first.data.feed, ...second.data.feed]).toEqual(res.data.feed) - }) -}) diff --git a/packages/bsky/tests/algos/whats-hot.test.ts b/packages/bsky/tests/algos/whats-hot.test.ts deleted file mode 100644 index 9fb93a8ce50..00000000000 --- a/packages/bsky/tests/algos/whats-hot.test.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { HOUR } from '@atproto/common' -import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import basicSeed from '../seeds/basic' -import { makeAlgos } from '../../src' - -describe.skip('algo whats-hot', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - - // account dids, for convenience - let alice: string - let bob: string - let carol: string - - const feedPublisherDid = 'did:example:feed-publisher' - const feedUri = AtUri.make( - feedPublisherDid, - 'app.bsky.feed.generator', - 'whats-hot', - ).toString() - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_algo_whats_hot', - bsky: { algos: makeAlgos(feedPublisherDid) }, - 
}) - agent = new AtpAgent({ service: network.bsky.url }) - sc = network.getSeedClient() - await basicSeed(sc) - - alice = sc.dids.alice - bob = sc.dids.bob - carol = sc.dids.carol - await network.processAll() - await network.bsky.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - it('returns well liked posts', async () => { - const img = await sc.uploadFile( - alice, - 'tests/sample-img/key-landscape-small.jpg', - 'image/jpeg', - ) - const one = await sc.post(carol, 'carol is in the chat') - const two = await sc.post(carol, "it's me, carol") - const three = await sc.post(alice, 'first post', undefined, [img]) - const four = await sc.post(bob, 'bobby boi') - const five = await sc.post(bob, 'another one') - - for (let i = 0; i < 20; i++) { - const name = `user${i}` - await sc.createAccount(name, { - handle: `user${i}.test`, - email: `user${i}@test.com`, - password: 'password', - }) - await sc.like(sc.dids[name], three.ref) // will be down-regulated by time - if (i > 3) { - await sc.like(sc.dids[name], one.ref) - } - if (i > 5) { - await sc.like(sc.dids[name], two.ref) - } - if (i > 7) { - await sc.like(sc.dids[name], four.ref) - await sc.like(sc.dids[name], five.ref) - } - } - await network.bsky.processAll() - - // move the 3rd post 5 hours into the past to check gravity - await network.bsky.ctx.db - .getPrimary() - .db.updateTable('post') - .where('uri', '=', three.ref.uriStr) - .set({ indexedAt: new Date(Date.now() - 5 * HOUR).toISOString() }) - .execute() - - await network.bsky.ctx.db - .getPrimary() - .refreshMaterializedView('algo_whats_hot_view') - - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - expect(res.data.feed[0].post.uri).toBe(one.ref.uriStr) - expect(res.data.feed[1].post.uri).toBe(two.ref.uriStr) - const indexOfThird = res.data.feed.findIndex( - (item) => item.post.uri === three.ref.uriStr, - ) - // doesn't quite matter where this cam in but 
it should be down-regulated pretty severely from gravity - expect(indexOfThird).toBeGreaterThan(3) - }) - - it('paginates', async () => { - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - const first = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, limit: 3 }, - { headers: await network.serviceHeaders(alice) }, - ) - const second = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, cursor: first.data.cursor }, - { headers: await network.serviceHeaders(alice) }, - ) - - expect([...first.data.feed, ...second.data.feed]).toEqual(res.data.feed) - }) -}) diff --git a/packages/bsky/tests/algos/with-friends.test.ts b/packages/bsky/tests/algos/with-friends.test.ts deleted file mode 100644 index 2c5339849c8..00000000000 --- a/packages/bsky/tests/algos/with-friends.test.ts +++ /dev/null @@ -1,145 +0,0 @@ -import AtpAgent, { AtUri } from '@atproto/api' -import userSeed from '../seeds/users' -import { makeAlgos } from '../../src' -import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' - -describe.skip('algo with friends', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - - // account dids, for convenience - let alice: string - let bob: string - let carol: string - let dan: string - - const feedPublisherDid = 'did:example:feed-publisher' - const feedUri = AtUri.make( - feedPublisherDid, - 'app.bsky.feed.generator', - 'with-friends', - ).toString() - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_algo_with_friends', - bsky: { algos: makeAlgos(feedPublisherDid) }, - }) - agent = new AtpAgent({ service: network.bsky.url }) - sc = network.getSeedClient() - await userSeed(sc) - - alice = sc.dids.alice - bob = sc.dids.bob - carol = sc.dids.carol - dan = sc.dids.dan - await network.processAll() - await network.bsky.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - let 
expectedFeed: string[] - - it('setup', async () => { - for (let i = 0; i < 10; i++) { - const name = `user${i}` - await sc.createAccount(name, { - handle: `user${i}.test`, - email: `user${i}@test.com`, - password: 'password', - }) - } - - const hitLikeThreshold = async (ref: RecordRef) => { - for (let i = 0; i < 10; i++) { - const name = `user${i}` - await sc.like(sc.dids[name], ref) - } - } - - // bob and dan are mutuals of alice, all userN are out-of-network. - await sc.follow(alice, bob) - await sc.follow(alice, carol) - await sc.follow(alice, dan) - await sc.follow(bob, alice) - await sc.follow(dan, alice) - const one = await sc.post(bob, 'one') - const two = await sc.post(bob, 'two') - const three = await sc.post(carol, 'three') - const four = await sc.post(carol, 'four') - const five = await sc.post(dan, 'five') - const six = await sc.post(dan, 'six') - const seven = await sc.post(sc.dids.user0, 'seven') - const eight = await sc.post(sc.dids.user0, 'eight') - const nine = await sc.post(sc.dids.user1, 'nine') - const ten = await sc.post(sc.dids.user1, 'ten') - - // 1, 2, 3, 4, 6, 8, 10 hit like threshold - await hitLikeThreshold(one.ref) - await hitLikeThreshold(two.ref) - await hitLikeThreshold(three.ref) - await hitLikeThreshold(four.ref) - await hitLikeThreshold(six.ref) - await hitLikeThreshold(eight.ref) - await hitLikeThreshold(ten.ref) - - // 1, 4, 7, 8, 10 liked by mutual - await sc.like(bob, one.ref) - await sc.like(dan, four.ref) - await sc.like(bob, seven.ref) - await sc.like(dan, eight.ref) - await sc.like(bob, nine.ref) - await sc.like(dan, ten.ref) - - // all liked by non-mutual - await sc.like(carol, one.ref) - await sc.like(carol, two.ref) - await sc.like(carol, three.ref) - await sc.like(carol, four.ref) - await sc.like(carol, five.ref) - await sc.like(carol, six.ref) - await sc.like(carol, seven.ref) - await sc.like(carol, eight.ref) - await sc.like(carol, nine.ref) - await sc.like(carol, ten.ref) - - await network.bsky.processAll() - - 
expectedFeed = [ - ten.ref.uriStr, - eight.ref.uriStr, - four.ref.uriStr, - one.ref.uriStr, - ] - }) - - it('returns popular in & out of network posts', async () => { - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - const feedUris = res.data.feed.map((i) => i.post.uri) - expect(feedUris).toEqual(expectedFeed) - }) - - it('paginates', async () => { - const res = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri }, - { headers: await network.serviceHeaders(alice) }, - ) - const first = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, limit: 2 }, - { headers: await network.serviceHeaders(alice) }, - ) - const second = await agent.api.app.bsky.feed.getFeed( - { feed: feedUri, cursor: first.data.cursor }, - { headers: await network.serviceHeaders(alice) }, - ) - - expect([...first.data.feed, ...second.data.feed]).toEqual(res.data.feed) - }) -}) diff --git a/packages/bsky/tests/auto-moderator/fixtures/hiveai_resp_example.json b/packages/bsky/tests/auto-moderator/fixtures/hiveai_resp_example.json deleted file mode 100644 index 2315fa9d0c0..00000000000 --- a/packages/bsky/tests/auto-moderator/fixtures/hiveai_resp_example.json +++ /dev/null @@ -1,401 +0,0 @@ -{ - "id": "02122580-c37f-11ed-81d2-000000000000", - "code": 200, - "project_id": 12345, - "user_id": 12345, - "created_on": "2023-03-15T22:16:18.408Z", - "status": [ - { - "status": { - "code": "0", - "message": "SUCCESS" - }, - "response": { - "input": { - "id": "02122580-c37f-11ed-81d2-000000000000", - "charge": 0.003, - "model": "mod55_dense", - "model_version": 1, - "model_type": "CATEGORIZATION", - "created_on": "2023-03-15T22:16:18.136Z", - "media": { - "url": null, - "filename": "bafkreiam7k6mvkyuoybq4ynhljvj5xa75sdbhjbolzjf5j2udx7vj5gnsy", - "type": "PHOTO", - "mime_type": "jpeg", - "mimetype": "image/jpeg", - "width": 800, - "height": 800, - "num_frames": 1, - "duration": 0 - }, - "user_id": 12345, - 
"project_id": 12345, - "config_version": 1, - "config_tag": "default" - }, - "output": [ - { - "time": 0, - "classes": [ - { - "class": "general_not_nsfw_not_suggestive", - "score": 0.9998097218132356 - }, - { - "class": "general_nsfw", - "score": 8.857344804177162e-5 - }, - { - "class": "general_suggestive", - "score": 0.00010170473872266839 - }, - { - "class": "no_female_underwear", - "score": 0.9999923079040384 - }, - { - "class": "yes_female_underwear", - "score": 7.692095961599136e-6 - }, - { - "class": "no_male_underwear", - "score": 0.9999984904867634 - }, - { - "class": "yes_male_underwear", - "score": 1.5095132367094679e-6 - }, - { - "class": "no_sex_toy", - "score": 0.9999970970762551 - }, - { - "class": "yes_sex_toy", - "score": 2.9029237450490604e-6 - }, - { - "class": "no_female_nudity", - "score": 0.9999739028909301 - }, - { - "class": "yes_female_nudity", - "score": 2.60971090699536e-5 - }, - { - "class": "no_male_nudity", - "score": 0.9999711373083747 - }, - { - "class": "yes_male_nudity", - "score": 2.8862691625255323e-5 - }, - { - "class": "no_female_swimwear", - "score": 0.9999917609899659 - }, - { - "class": "yes_female_swimwear", - "score": 8.239010034025379e-6 - }, - { - "class": "no_male_shirtless", - "score": 0.9999583350744331 - }, - { - "class": "yes_male_shirtless", - "score": 4.166492556688088e-5 - }, - { - "class": "no_text", - "score": 0.9958378716447616 - }, - { - "class": "text", - "score": 0.0041621283552384265 - }, - { - "class": "animated", - "score": 0.46755478950048235 - }, - { - "class": "hybrid", - "score": 0.0011440363434524984 - }, - { - "class": "natural", - "score": 0.5313011741560651 - }, - { - "class": "animated_gun", - "score": 2.0713000782979496e-5 - }, - { - "class": "gun_in_hand", - "score": 1.5844730446534659e-6 - }, - { - "class": "gun_not_in_hand", - "score": 1.0338973818006654e-6 - }, - { - "class": "no_gun", - "score": 0.9999766686287906 - }, - { - "class": "culinary_knife_in_hand", - "score": 
3.8063500083369785e-6 - }, - { - "class": "culinary_knife_not_in_hand", - "score": 7.94057948996249e-7 - }, - { - "class": "knife_in_hand", - "score": 4.5578955723278505e-7 - }, - { - "class": "knife_not_in_hand", - "score": 3.842124714748908e-7 - }, - { - "class": "no_knife", - "score": 0.999994559590014 - }, - { - "class": "a_little_bloody", - "score": 2.1317745626539786e-7 - }, - { - "class": "no_blood", - "score": 0.9999793341236429 - }, - { - "class": "other_blood", - "score": 2.0322054269591763e-5 - }, - { - "class": "very_bloody", - "score": 1.306446309561673e-7 - }, - { - "class": "no_pills", - "score": 0.9999989592376954 - }, - { - "class": "yes_pills", - "score": 1.0407623044588633e-6 - }, - { - "class": "no_smoking", - "score": 0.9999939101969173 - }, - { - "class": "yes_smoking", - "score": 6.089803082758281e-6 - }, - { - "class": "illicit_injectables", - "score": 6.925695592003094e-7 - }, - { - "class": "medical_injectables", - "score": 8.587808234452378e-7 - }, - { - "class": "no_injectables", - "score": 0.9999984486496174 - }, - { - "class": "no_nazi", - "score": 0.9999987449628097 - }, - { - "class": "yes_nazi", - "score": 1.2550371902234279e-6 - }, - { - "class": "no_kkk", - "score": 0.999999762417549 - }, - { - "class": "yes_kkk", - "score": 2.3758245111050425e-7 - }, - { - "class": "no_middle_finger", - "score": 0.9999881515231847 - }, - { - "class": "yes_middle_finger", - "score": 1.184847681536747e-5 - }, - { - "class": "no_terrorist", - "score": 0.9999998870793229 - }, - { - "class": "yes_terrorist", - "score": 1.1292067715380635e-7 - }, - { - "class": "no_overlay_text", - "score": 0.9996453363440359 - }, - { - "class": "yes_overlay_text", - "score": 0.0003546636559640924 - }, - { - "class": "no_sexual_activity", - "score": 0.9999563580374798 - }, - { - "class": "yes_sexual_activity", - "score": 0.99, - "realScore": 4.364196252012032e-5 - }, - { - "class": "hanging", - "score": 3.6435135762510905e-7 - }, - { - "class": "no_hanging_no_noose", - 
"score": 0.9999980779196416 - }, - { - "class": "noose", - "score": 1.5577290007796094e-6 - }, - { - "class": "no_realistic_nsfw", - "score": 0.9999944341007805 - }, - { - "class": "yes_realistic_nsfw", - "score": 5.565899219571182e-6 - }, - { - "class": "animated_corpse", - "score": 5.276802046755426e-7 - }, - { - "class": "human_corpse", - "score": 2.5449360984211012e-8 - }, - { - "class": "no_corpse", - "score": 0.9999994468704343 - }, - { - "class": "no_self_harm", - "score": 0.9999994515625507 - }, - { - "class": "yes_self_harm", - "score": 5.484374493605692e-7 - }, - { - "class": "no_drawing", - "score": 0.9978276028816608 - }, - { - "class": "yes_drawing", - "score": 0.0021723971183392485 - }, - { - "class": "no_emaciated_body", - "score": 0.9999998146500432 - }, - { - "class": "yes_emaciated_body", - "score": 1.853499568724518e-7 - }, - { - "class": "no_child_present", - "score": 0.9999970498515446 - }, - { - "class": "yes_child_present", - "score": 2.950148455380443e-6 - }, - { - "class": "no_sexual_intent", - "score": 0.9999963861546292 - }, - { - "class": "yes_sexual_intent", - "score": 3.613845370766111e-6 - }, - { - "class": "animal_genitalia_and_human", - "score": 2.255472023465222e-8 - }, - { - "class": "animal_genitalia_only", - "score": 4.6783185199931176e-7 - }, - { - "class": "animated_animal_genitalia", - "score": 6.707857419436447e-7 - }, - { - "class": "no_animal_genitalia", - "score": 0.9999988388276858 - }, - { - "class": "no_gambling", - "score": 0.9999960939687145 - }, - { - "class": "yes_gambling", - "score": 3.906031285604864e-6 - }, - { - "class": "no_undressed", - "score": 0.99999923356218 - }, - { - "class": "yes_undressed", - "score": 7.664378199789045e-7 - }, - { - "class": "no_confederate", - "score": 0.9999925456900376 - }, - { - "class": "yes_confederate", - "score": 7.454309962453175e-6 - }, - { - "class": "animated_alcohol", - "score": 1.8109949948066074e-6 - }, - { - "class": "no_alcohol", - "score": 0.9999916620957963 - }, - 
{ - "class": "yes_alcohol", - "score": 5.88781463445443e-6 - }, - { - "class": "yes_drinking_alcohol", - "score": 6.390945746578106e-7 - }, - { - "class": "no_religious_icon", - "score": 0.9999862158580689 - }, - { - "class": "yes_religious_icon", - "score": 1.3784141931119298e-5 - } - ] - } - ] - } - } - ], - "from_cache": false -} diff --git a/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts b/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts deleted file mode 100644 index 60fe50d582d..00000000000 --- a/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts +++ /dev/null @@ -1,165 +0,0 @@ -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import { FuzzyMatcher, encode } from '../../src/auto-moderator/fuzzy-matcher' -import basicSeed from '../seeds/basic' -import { AtpAgent } from '@atproto/api' -import { ImageInvalidator } from '../../src/image/invalidator' - -describe('fuzzy matcher', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - let fuzzyMatcher: FuzzyMatcher - - let alice: string - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'fuzzy_matcher', - bsky: { - imgInvalidator: new NoopInvalidator(), - indexer: { - fuzzyMatchB64: encode(['evil']), - }, - }, - }) - fuzzyMatcher = new FuzzyMatcher(['evil', 'mean', 'bad'], ['baddie']) - agent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - alice = sc.dids.alice - }) - - afterAll(async () => { - await network.close() - }) - - const getAllReports = () => { - return network.bsky.ctx.db - .getPrimary() - .db.selectFrom('moderation_event') - .where('action', '=', 'com.atproto.admin.defs#modEventReport') - .selectAll() - .orderBy('id', 'asc') - .execute() - } - - it('identifies fuzzy matches', () => { - expect(fuzzyMatcher.getMatches('evil.john.test')).toMatchObject(['evil']) - expect(fuzzyMatcher.getMatches('john.evil.test')).toMatchObject(['evil']) - 
expect(fuzzyMatcher.getMatches('john.test.evil')).toMatchObject(['evil']) - expect(fuzzyMatcher.getMatches('ev1l.test.john')).toMatchObject(['evil']) - expect(fuzzyMatcher.getMatches('ev-1l.test.john')).toMatchObject(['evil']) - expect(fuzzyMatcher.getMatches('ev-11.test.john')).toMatchObject(['evil']) - expect(fuzzyMatcher.getMatches('ev.-1.l-test.john')).toMatchObject(['evil']) - }) - - it('identifies fuzzy false positivies', () => { - expect(fuzzyMatcher.getMatches('john.test')).toHaveLength(0) - expect(fuzzyMatcher.getMatches('good.john.test')).toHaveLength(0) - expect(fuzzyMatcher.getMatches('john.baddie.test')).toHaveLength(0) - }) - - it('doesnt label any of the content in the seed', async () => { - const reports = await getAllReports() - expect(reports.length).toBe(0) - }) - - it('flags a handle with an unacceptable word', async () => { - await sc.updateHandle(alice, 'evil.test') - await network.processAll() - const reports = await getAllReports() - expect(reports.length).toBe(1) - expect(reports.at(-1)?.subjectDid).toEqual(alice) - }) - - it('flags a profile with an unacceptable displayName', async () => { - const res = await agent.api.com.atproto.repo.putRecord( - { - repo: alice, - collection: 'app.bsky.actor.profile', - rkey: 'self', - record: { - displayName: 'evil alice', - }, - }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - await network.processAll() - - const reports = await getAllReports() - expect(reports.length).toBe(2) - expect(reports.at(-1)?.subjectUri).toEqual(res.data.uri) - expect(reports.at(-1)?.subjectCid).toEqual(res.data.cid) - }) - - it('flags a list with an unacceptable name', async () => { - const res = await agent.api.com.atproto.repo.createRecord( - { - repo: alice, - collection: 'app.bsky.graph.list', - rkey: 'list', - record: { - name: 'myevillist', - purpose: 'app.bsky.graph.defs#modList', - createdAt: new Date().toISOString(), - }, - }, - { headers: sc.getHeaders(alice), encoding: 'application/json' 
}, - ) - await network.processAll() - - const reports = await getAllReports() - expect(reports.length).toBe(3) - expect(reports.at(-1)?.subjectUri).toEqual(res.data.uri) - expect(reports.at(-1)?.subjectCid).toEqual(res.data.cid) - }) - - it('flags a feed generator with an unacceptable displayName', async () => { - const res = await agent.api.com.atproto.repo.createRecord( - { - repo: alice, - collection: 'app.bsky.feed.generator', - rkey: 'generator', - record: { - did: alice, - displayName: 'myevilfeed', - createdAt: new Date().toISOString(), - }, - }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - await network.processAll() - - const reports = await getAllReports() - expect(reports.length).toBe(4) - expect(reports.at(-1)?.subjectUri).toEqual(res.data.uri) - expect(reports.at(-1)?.subjectCid).toEqual(res.data.cid) - }) - - it('flags a record with an unacceptable rkey', async () => { - const res = await agent.api.com.atproto.repo.createRecord( - { - repo: alice, - collection: 'app.bsky.feed.generator', - rkey: 'evilrkey', - record: { - did: alice, - displayName: 'totally fine feed', - createdAt: new Date().toISOString(), - }, - }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - await network.processAll() - - const reports = await getAllReports() - expect(reports.length).toBe(5) - expect(reports.at(-1)?.subjectUri).toEqual(res.data.uri) - expect(reports.at(-1)?.subjectCid).toEqual(res.data.cid) - }) -}) - -class NoopInvalidator implements ImageInvalidator { - async invalidate() {} -} diff --git a/packages/bsky/tests/auto-moderator/hive.test.ts b/packages/bsky/tests/auto-moderator/hive.test.ts deleted file mode 100644 index 3a5cef45a37..00000000000 --- a/packages/bsky/tests/auto-moderator/hive.test.ts +++ /dev/null @@ -1,16 +0,0 @@ -import fs from 'fs/promises' -import * as hive from '../../src/auto-moderator/hive' - -describe('labeling', () => { - it('correctly parses hive responses', async () => { - const 
exampleRespBytes = await fs.readFile( - 'tests/auto-moderator/fixtures/hiveai_resp_example.json', - ) - const exampleResp = JSON.parse(exampleRespBytes.toString()) - const classes = hive.respToClasses(exampleResp) - expect(classes.length).toBeGreaterThan(10) - - const labels = hive.summarizeLabels(classes) - expect(labels).toEqual(['porn']) - }) -}) diff --git a/packages/bsky/tests/auto-moderator/labeler.test.ts b/packages/bsky/tests/auto-moderator/labeler.test.ts deleted file mode 100644 index 3687a360980..00000000000 --- a/packages/bsky/tests/auto-moderator/labeler.test.ts +++ /dev/null @@ -1,167 +0,0 @@ -import { TestNetwork } from '@atproto/dev-env' -import { AtUri, BlobRef } from '@atproto/api' -import { Readable } from 'stream' -import { AutoModerator } from '../../src/auto-moderator' -import IndexerContext from '../../src/indexer/context' -import { cidForRecord } from '@atproto/repo' -import { TID } from '@atproto/common' -import { LabelService } from '../../src/services/label' -import usersSeed from '../seeds/users' -import { CID } from 'multiformats/cid' -import { ImgLabeler } from '../../src/auto-moderator/hive' - -// outside of test suite so that TestLabeler can access them -let badCid1: CID | undefined = undefined -let badCid2: CID | undefined = undefined - -describe('labeler', () => { - let network: TestNetwork - let autoMod: AutoModerator - let labelSrvc: LabelService - let ctx: IndexerContext - let labelerDid: string - let badBlob1: BlobRef - let badBlob2: BlobRef - let goodBlob: BlobRef - let alice: string - const postUri = () => AtUri.make(alice, 'app.bsky.feed.post', TID.nextStr()) - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_labeler', - }) - ctx = network.bsky.indexer.ctx - const pdsCtx = network.pds.ctx - labelerDid = ctx.cfg.labelerDid - autoMod = ctx.autoMod - autoMod.imgLabeler = new TestImgLabeler() - labelSrvc = ctx.services.label(ctx.db) - const sc = network.getSeedClient() - await 
usersSeed(sc) - await network.processAll() - alice = sc.dids.alice - const storeBlob = (bytes: Uint8Array) => { - return pdsCtx.actorStore.transact(alice, async (store) => { - const blobRef = await store.repo.blob.addUntetheredBlob( - 'image/jpeg', - Readable.from([bytes], { objectMode: false }), - ) - const preparedBlobRef = { - cid: blobRef.ref, - mimeType: 'image/jpeg', - constraints: {}, - } - await store.repo.blob.verifyBlobAndMakePermanent(preparedBlobRef) - await store.repo.blob.associateBlob( - preparedBlobRef, - postUri(), - TID.nextStr(), - ) - return blobRef - }) - } - const bytes1 = new Uint8Array([1, 2, 3, 4]) - const bytes2 = new Uint8Array([5, 6, 7, 8]) - const bytes3 = new Uint8Array([4, 3, 2, 1]) - badBlob1 = await storeBlob(bytes1) - badBlob2 = await storeBlob(bytes2) - goodBlob = await storeBlob(bytes3) - badCid1 = badBlob1.ref - badCid2 = badBlob2.ref - }) - - afterAll(async () => { - await network.close() - }) - - it('labels text in posts', async () => { - const post = { - $type: 'app.bsky.feed.post', - text: 'blah blah label_me', - createdAt: new Date().toISOString(), - } - const cid = await cidForRecord(post) - const uri = postUri() - autoMod.processRecord(uri, cid, post) - await autoMod.processAll() - const labels = await labelSrvc.getLabels(uri.toString()) - expect(labels.length).toBe(1) - expect(labels[0]).toMatchObject({ - src: labelerDid, - uri: uri.toString(), - cid: cid.toString(), - val: 'test-label', - neg: false, - }) - }) - - it('labels embeds in posts', async () => { - const post = { - $type: 'app.bsky.feed.post', - text: 'blah blah', - embed: { - $type: 'app.bsky.embed.images', - images: [ - { - image: badBlob1, - alt: 'img', - }, - { - image: badBlob2, - alt: 'label_me_2', - }, - { - image: goodBlob, - alt: 'img', - }, - ], - }, - createdAt: new Date().toISOString(), - } - const uri = postUri() - const cid = await cidForRecord(post) - autoMod.processRecord(uri, cid, post) - await autoMod.processAll() - const dbLabels = await 
labelSrvc.getLabels(uri.toString()) - const labels = dbLabels.map((row) => row.val).sort() - expect(labels).toEqual( - ['test-label', 'test-label-2', 'img-label', 'other-img-label'].sort(), - ) - }) - - it('retrieves repo labels on profile views', async () => { - await ctx.db.db - .insertInto('label') - .values({ - src: labelerDid, - uri: alice, - cid: '', - val: 'repo-label', - neg: false, - cts: new Date().toISOString(), - }) - .execute() - - const labels = await labelSrvc.getLabelsForProfile(alice) - - expect(labels.length).toBe(1) - expect(labels[0]).toMatchObject({ - src: labelerDid, - uri: alice, - val: 'repo-label', - neg: false, - }) - }) -}) - -class TestImgLabeler implements ImgLabeler { - async labelImg(_did: string, cid: CID): Promise { - if (cid.equals(badCid1)) { - return ['img-label'] - } - if (cid.equals(badCid2)) { - return ['other-img-label'] - } - return [] - } -} diff --git a/packages/bsky/tests/auto-moderator/takedowns.test.ts b/packages/bsky/tests/auto-moderator/takedowns.test.ts deleted file mode 100644 index 8c1f1a21cdd..00000000000 --- a/packages/bsky/tests/auto-moderator/takedowns.test.ts +++ /dev/null @@ -1,201 +0,0 @@ -import fs from 'fs/promises' -import { TestNetwork, SeedClient, ImageRef } from '@atproto/dev-env' -import { AtpAgent } from '@atproto/api' -import { AutoModerator } from '../../src/auto-moderator' -import IndexerContext from '../../src/indexer/context' -import { sha256RawToCid } from '@atproto/common' -import usersSeed from '../seeds/users' -import { CID } from 'multiformats/cid' -import { AtUri } from '@atproto/syntax' -import { ImageFlagger } from '../../src/auto-moderator/abyss' -import { ImageInvalidator } from '../../src/image/invalidator' -import { sha256 } from '@atproto/crypto' -import { ids } from '../../src/lexicon/lexicons' - -// outside of test suite so that TestLabeler can access them -let badCid1: CID | undefined = undefined -let badCid2: CID | undefined = undefined - -describe('takedowner', () => { - let 
network: TestNetwork - let autoMod: AutoModerator - let testInvalidator: TestInvalidator - let ctx: IndexerContext - let pdsAgent: AtpAgent - let sc: SeedClient - let alice: string - let badBlob1: ImageRef - let badBlob2: ImageRef - let goodBlob: ImageRef - - beforeAll(async () => { - testInvalidator = new TestInvalidator() - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_automod_takedown', - bsky: { - imgInvalidator: testInvalidator, - }, - }) - ctx = network.bsky.indexer.ctx - autoMod = ctx.autoMod - autoMod.imageFlagger = new TestFlagger() - pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = network.getSeedClient() - await usersSeed(sc) - await network.processAll() - alice = sc.dids.alice - const fileBytes1 = await fs.readFile( - 'tests/sample-img/key-portrait-small.jpg', - ) - const fileBytes2 = await fs.readFile( - 'tests/sample-img/key-portrait-large.jpg', - ) - badCid1 = sha256RawToCid(await sha256(fileBytes1)) - badCid2 = sha256RawToCid(await sha256(fileBytes2)) - goodBlob = await sc.uploadFile( - alice, - 'tests/sample-img/key-landscape-small.jpg', - 'image/jpeg', - ) - badBlob1 = await sc.uploadFile( - alice, - 'tests/sample-img/key-portrait-small.jpg', - 'image/jpeg', - ) - badBlob2 = await sc.uploadFile( - alice, - 'tests/sample-img/key-portrait-large.jpg', - 'image/jpeg', - ) - }) - - afterAll(async () => { - await network.close() - }) - - it('takes down flagged content in posts', async () => { - const post = await sc.post(alice, 'blah', undefined, [goodBlob, badBlob1]) - await network.processAll() - await autoMod.processAll() - const [modStatus, takedownEvent] = await Promise.all([ - ctx.db.db - .selectFrom('moderation_subject_status') - .where('did', '=', alice) - .where( - 'recordPath', - '=', - `${post.ref.uri.collection}/${post.ref.uri.rkey}`, - ) - .select(['takendown', 'id']) - .executeTakeFirst(), - ctx.db.db - .selectFrom('moderation_event') - .where('subjectDid', '=', alice) - .where('action', '=', 
'com.atproto.admin.defs#modEventTakedown') - .selectAll() - .executeTakeFirst(), - ]) - if (!modStatus || !takedownEvent) { - throw new Error('expected mod action') - } - expect(modStatus.takendown).toEqual(true) - const record = await ctx.db.db - .selectFrom('record') - .where('uri', '=', post.ref.uriStr) - .select('takedownId') - .executeTakeFirst() - expect(record?.takedownId).toBeGreaterThan(0) - - const recordPds = await network.pds.ctx.actorStore.read( - post.ref.uri.hostname, - (store) => - store.db.db - .selectFrom('record') - .where('uri', '=', post.ref.uriStr) - .select('takedownRef') - .executeTakeFirst(), - ) - expect(recordPds?.takedownRef).toEqual(takedownEvent.id.toString()) - - expect(testInvalidator.invalidated.length).toBe(1) - expect(testInvalidator.invalidated[0].subject).toBe( - badBlob1.image.ref.toString(), - ) - }) - - it('takes down flagged content in profiles', async () => { - const res = await pdsAgent.api.com.atproto.repo.putRecord( - { - repo: alice, - collection: ids.AppBskyActorProfile, - rkey: 'self', - record: { - avatar: badBlob2.image, - }, - }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - await network.processAll() - const [modStatus, takedownEvent] = await Promise.all([ - ctx.db.db - .selectFrom('moderation_subject_status') - .where('did', '=', alice) - .where('recordPath', '=', `${ids.AppBskyActorProfile}/self`) - .select(['takendown', 'id']) - .executeTakeFirst(), - ctx.db.db - .selectFrom('moderation_event') - .where('subjectDid', '=', alice) - .where( - 'subjectUri', - '=', - AtUri.make(alice, ids.AppBskyActorProfile, 'self').toString(), - ) - .where('action', '=', 'com.atproto.admin.defs#modEventTakedown') - .selectAll() - .executeTakeFirst(), - ]) - if (!modStatus || !takedownEvent) { - throw new Error('expected mod action') - } - expect(modStatus.takendown).toEqual(true) - const record = await ctx.db.db - .selectFrom('record') - .where('uri', '=', res.data.uri) - .select('takedownId') - 
.executeTakeFirst() - expect(record?.takedownId).toBeGreaterThan(0) - - const recordPds = await network.pds.ctx.actorStore.read(alice, (store) => - store.db.db - .selectFrom('record') - .where('uri', '=', res.data.uri) - .select('takedownRef') - .executeTakeFirst(), - ) - expect(recordPds?.takedownRef).toEqual(takedownEvent.id.toString()) - - expect(testInvalidator.invalidated.length).toBe(2) - expect(testInvalidator.invalidated[1].subject).toBe( - badBlob2.image.ref.toString(), - ) - }) -}) - -class TestInvalidator implements ImageInvalidator { - public invalidated: { subject: string; paths: string[] }[] = [] - async invalidate(subject: string, paths: string[]) { - this.invalidated.push({ subject, paths }) - } -} - -class TestFlagger implements ImageFlagger { - async scanImage(_did: string, cid: CID, _uri: AtUri): Promise { - if (cid.equals(badCid1)) { - return ['kill-it'] - } else if (cid.equals(badCid2)) { - return ['with-fire'] - } - return [] - } -} diff --git a/packages/bsky/tests/daemon.test.ts b/packages/bsky/tests/daemon.test.ts deleted file mode 100644 index 32f0d6617ab..00000000000 --- a/packages/bsky/tests/daemon.test.ts +++ /dev/null @@ -1,191 +0,0 @@ -import assert from 'assert' -import { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { BskyDaemon, DaemonConfig, PrimaryDatabase } from '../src' -import usersSeed from './seeds/users' -import { countAll, excluded } from '../src/db/util' -import { NotificationsDaemon } from '../src/daemon/notifications' -import { - BEFORE_LAST_SEEN_DAYS, - BEFORE_LATEST_UNREAD_DAYS, - UNREAD_KEPT_COUNT, -} from '../src/services/util/notification' - -describe('daemon', () => { - let network: TestNetwork - let daemon: BskyDaemon - let db: PrimaryDatabase - let actors: { did: string }[] = [] - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_daemon', - }) - db = network.bsky.ctx.db.getPrimary() - daemon = BskyDaemon.create({ - db, - cfg: new 
DaemonConfig({ - version: network.bsky.ctx.cfg.version, - dbPostgresUrl: network.bsky.ctx.cfg.dbPrimaryPostgresUrl, - dbPostgresSchema: network.bsky.ctx.cfg.dbPostgresSchema, - }), - }) - const sc = network.getSeedClient() - await usersSeed(sc) - await network.processAll() - actors = await db.db.selectFrom('actor').selectAll().execute() - }) - - afterAll(async () => { - await network.close() - }) - - describe('notifications daemon', () => { - it('processes all dids', async () => { - for (const { did } of actors) { - await Promise.all([ - setLastSeen(daemon.ctx.db, { did }), - createNotifications(daemon.ctx.db, { - did, - daysAgo: 2 * BEFORE_LAST_SEEN_DAYS, - count: 1, - }), - ]) - } - await expect(countNotifications(db)).resolves.toBe(actors.length) - await runNotifsOnce(daemon.notifications) - await expect(countNotifications(db)).resolves.toBe(0) - }) - - it('removes read notifications older than threshold.', async () => { - const { did } = actors[0] - const lastSeenDaysAgo = 10 - await Promise.all([ - setLastSeen(daemon.ctx.db, { did, daysAgo: lastSeenDaysAgo }), - // read, delete - createNotifications(daemon.ctx.db, { - did, - daysAgo: lastSeenDaysAgo + BEFORE_LAST_SEEN_DAYS + 1, - count: 2, - }), - // read, keep - createNotifications(daemon.ctx.db, { - did, - daysAgo: lastSeenDaysAgo + BEFORE_LAST_SEEN_DAYS - 1, - count: 3, - }), - // unread, keep - createNotifications(daemon.ctx.db, { - did, - daysAgo: lastSeenDaysAgo - 1, - count: 4, - }), - ]) - await expect(countNotifications(db)).resolves.toBe(9) - await runNotifsOnce(daemon.notifications) - await expect(countNotifications(db)).resolves.toBe(7) - await clearNotifications(db) - }) - - it('removes unread notifications older than threshold.', async () => { - const { did } = actors[0] - await Promise.all([ - setLastSeen(daemon.ctx.db, { - did, - daysAgo: 2 * BEFORE_LATEST_UNREAD_DAYS, // all are unread - }), - createNotifications(daemon.ctx.db, { - did, - daysAgo: 0, - count: 1, - }), - 
createNotifications(daemon.ctx.db, { - did, - daysAgo: BEFORE_LATEST_UNREAD_DAYS - 1, - count: 99, - }), - createNotifications(daemon.ctx.db, { - did, - daysAgo: BEFORE_LATEST_UNREAD_DAYS + 1, - count: 400, - }), - ]) - await expect(countNotifications(db)).resolves.toBe(UNREAD_KEPT_COUNT) - await runNotifsOnce(daemon.notifications) - // none removed when within UNREAD_KEPT_COUNT - await expect(countNotifications(db)).resolves.toBe(UNREAD_KEPT_COUNT) - // add one more, tip over UNREAD_KEPT_COUNT - await createNotifications(daemon.ctx.db, { - did, - daysAgo: BEFORE_LATEST_UNREAD_DAYS + 1, - count: 1, - }) - await runNotifsOnce(daemon.notifications) - // removed all older than BEFORE_LATEST_UNREAD_DAYS - await expect(countNotifications(db)).resolves.toBe(100) - await clearNotifications(db) - }) - }) - - const runNotifsOnce = async (notifsDaemon: NotificationsDaemon) => { - assert(!notifsDaemon.running, 'notifications daemon is already running') - notifsDaemon.run({ forever: false, batchSize: 2 }) - await notifsDaemon.running - } - - const setLastSeen = async ( - db: PrimaryDatabase, - opts: { did: string; daysAgo?: number }, - ) => { - const { did, daysAgo = 0 } = opts - const lastSeenAt = new Date() - lastSeenAt.setDate(lastSeenAt.getDate() - daysAgo) - await db.db - .insertInto('actor_state') - .values({ did, lastSeenNotifs: lastSeenAt.toISOString() }) - .onConflict((oc) => - oc.column('did').doUpdateSet({ - lastSeenNotifs: excluded(db.db, 'lastSeenNotifs'), - }), - ) - .execute() - } - - const createNotifications = async ( - db: PrimaryDatabase, - opts: { - did: string - count: number - daysAgo: number - }, - ) => { - const { did, count, daysAgo } = opts - const sortAt = new Date() - sortAt.setDate(sortAt.getDate() - daysAgo) - await db.db - .insertInto('notification') - .values( - [...Array(count)].map(() => ({ - did, - author: did, - reason: 'none', - recordCid: 'bafycid', - recordUri: AtUri.make(did, 'invalid.collection', 'self').toString(), - sortAt: 
sortAt.toISOString(), - })), - ) - .execute() - } - - const clearNotifications = async (db: PrimaryDatabase) => { - await db.db.deleteFrom('notification').execute() - } - - const countNotifications = async (db: PrimaryDatabase) => { - const { count } = await db.db - .selectFrom('notification') - .select(countAll.as('count')) - .executeTakeFirstOrThrow() - return count - } -}) diff --git a/packages/bsky/tests/notification-server.test.ts b/packages/bsky/tests/notification-server.test.ts deleted file mode 100644 index 6f9c8b00224..00000000000 --- a/packages/bsky/tests/notification-server.test.ts +++ /dev/null @@ -1,231 +0,0 @@ -import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import basicSeed from './seeds/basic' -import { NotificationServer } from '../src/notifications' -import { Database } from '../src' - -describe('notification server', () => { - let network: TestNetwork - let agent: AtpAgent - let pdsAgent: AtpAgent - let sc: SeedClient - let notifServer: NotificationServer - - // account dids, for convenience - let alice: string - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_notification_server', - }) - agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = network.getSeedClient() - await basicSeed(sc) - await network.processAll() - await network.bsky.processAll() - alice = sc.dids.alice - notifServer = network.bsky.ctx.notifServer - }) - - afterAll(async () => { - await network.close() - }) - - describe('registerPushNotification', () => { - it('registers push notification token and device.', async () => { - const res = await agent.api.app.bsky.notification.registerPush( - { - serviceDid: network.bsky.ctx.cfg.serverDid, - platform: 'ios', - token: '123', - appId: 'xyz.blueskyweb.app', - }, - { - encoding: 'application/json', - headers: await network.serviceHeaders(alice), - }, - ) - expect(res.success).toEqual(true) - }) - - it('allows 
reregistering push notification token.', async () => { - const res1 = await agent.api.app.bsky.notification.registerPush( - { - serviceDid: network.bsky.ctx.cfg.serverDid, - platform: 'web', - token: '234', - appId: 'xyz.blueskyweb.app', - }, - { - encoding: 'application/json', - headers: await network.serviceHeaders(alice), - }, - ) - const res2 = await agent.api.app.bsky.notification.registerPush( - { - serviceDid: network.bsky.ctx.cfg.serverDid, - platform: 'web', - token: '234', - appId: 'xyz.blueskyweb.app', - }, - { - encoding: 'application/json', - headers: await network.serviceHeaders(alice), - }, - ) - expect(res1.success).toEqual(true) - expect(res2.success).toEqual(true) - }) - - it('does not allows registering push notification at mismatching service.', async () => { - const tryRegister = agent.api.app.bsky.notification.registerPush( - { - serviceDid: 'did:web:notifservice.com', - platform: 'ios', - token: '123', - appId: 'xyz.blueskyweb.app', - }, - { - encoding: 'application/json', - headers: await network.serviceHeaders(alice), - }, - ) - await expect(tryRegister).rejects.toThrow('Invalid serviceDid.') - }) - }) - - describe('NotificationServer', () => { - it('gets user tokens from db', async () => { - const tokens = await notifServer.getTokensByDid([alice]) - expect(tokens[alice][0].token).toEqual('123') - }) - - it('gets notification display attributes: title and body', async () => { - const db = network.bsky.ctx.db.getPrimary() - const notif = await getLikeNotification(db, alice) - if (!notif) throw new Error('no notification found') - const attrs = await notifServer.getNotificationDisplayAttributes([notif]) - if (!attrs.length) - throw new Error('no notification display attributes found') - expect(attrs[0].title).toEqual('bobby liked your post') - }) - - it('filters notifications that violate blocks', async () => { - const db = network.bsky.ctx.db.getPrimary() - const notif = await getLikeNotification(db, alice) - if (!notif) throw new Error('no 
notification found') - const blockRef = await pdsAgent.api.app.bsky.graph.block.create( - { repo: alice }, - { subject: notif.author, createdAt: new Date().toISOString() }, - sc.getHeaders(alice), - ) - await network.processAll() - // verify inverse of block - const flippedNotif = { - ...notif, - did: notif.author, - author: notif.did, - } - const attrs = await notifServer.getNotificationDisplayAttributes([ - notif, - flippedNotif, - ]) - expect(attrs.length).toBe(0) - const uri = new AtUri(blockRef.uri) - await pdsAgent.api.app.bsky.graph.block.delete( - { repo: alice, rkey: uri.rkey }, - sc.getHeaders(alice), - ) - await network.processAll() - }) - - it('filters notifications that violate mutes', async () => { - const db = network.bsky.ctx.db.getPrimary() - const notif = await getLikeNotification(db, alice) - if (!notif) throw new Error('no notification found') - await pdsAgent.api.app.bsky.graph.muteActor( - { actor: notif.author }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - const attrs = await notifServer.getNotificationDisplayAttributes([notif]) - expect(attrs.length).toBe(0) - await pdsAgent.api.app.bsky.graph.unmuteActor( - { actor: notif.author }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - }) - - it('filters notifications that violate mutelists', async () => { - const db = network.bsky.ctx.db.getPrimary() - const notif = await getLikeNotification(db, alice) - if (!notif) throw new Error('no notification found') - const listRef = await pdsAgent.api.app.bsky.graph.list.create( - { repo: alice }, - { - name: 'mute', - purpose: 'app.bsky.graph.defs#modlist', - createdAt: new Date().toISOString(), - }, - sc.getHeaders(alice), - ) - await pdsAgent.api.app.bsky.graph.listitem.create( - { repo: alice }, - { - subject: notif.author, - list: listRef.uri, - createdAt: new Date().toISOString(), - }, - sc.getHeaders(alice), - ) - await network.processAll() - await pdsAgent.api.app.bsky.graph.muteActorList( - { 
list: listRef.uri }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - const attrs = await notifServer.getNotificationDisplayAttributes([notif]) - expect(attrs.length).toBe(0) - await pdsAgent.api.app.bsky.graph.unmuteActorList( - { list: listRef.uri }, - { headers: sc.getHeaders(alice), encoding: 'application/json' }, - ) - }) - - it('prepares notification to be sent', async () => { - const db = network.bsky.ctx.db.getPrimary() - const notif = await getLikeNotification(db, alice) - if (!notif) throw new Error('no notification found') - const notifAsArray = [ - notif, - notif /* second one will get dropped by rate limit */, - ] - const prepared = await notifServer.prepareNotifsToSend(notifAsArray) - expect(prepared).toEqual([ - { - collapse_id: 'like', - collapse_key: 'like', - data: { - reason: notif.reason, - recordCid: notif.recordCid, - recordUri: notif.recordUri, - }, - message: 'again', - platform: 1, - title: 'bobby liked your post', - tokens: ['123'], - topic: 'xyz.blueskyweb.app', - }, - ]) - }) - }) - - async function getLikeNotification(db: Database, did: string) { - return await db.db - .selectFrom('notification') - .selectAll() - .where('did', '=', did) - .where('reason', '=', 'like') - .orderBy('sortAt') - .executeTakeFirst() - } -}) diff --git a/packages/bsky/tests/pipeline/backpressure.test.ts b/packages/bsky/tests/pipeline/backpressure.test.ts deleted file mode 100644 index 583d749100e..00000000000 --- a/packages/bsky/tests/pipeline/backpressure.test.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { wait } from '@atproto/common' -import { - BskyIndexers, - TestNetworkNoAppView, - getIndexers, - getIngester, - processAll, - SeedClient, -} from '@atproto/dev-env' -import basicSeed from '../seeds/basic' -import { BskyIngester } from '../../src' - -const TEST_NAME = 'pipeline_backpressure' - -describe('pipeline backpressure', () => { - let network: TestNetworkNoAppView - let ingester: BskyIngester - let indexers: BskyIndexers - - let 
sc: SeedClient - - beforeAll(async () => { - network = await TestNetworkNoAppView.create({ - dbPostgresSchema: TEST_NAME, - }) - ingester = await getIngester(network, { - name: TEST_NAME, - ingesterPartitionCount: 2, - ingesterMaxItems: 10, - ingesterCheckItemsEveryN: 5, - }) - indexers = await getIndexers(network, { - name: TEST_NAME, - partitionIdsByIndexer: [[0], [1]], - }) - sc = network.getSeedClient() - await basicSeed(sc) - }) - - afterAll(async () => { - await network.close() - }) - - it('ingester issues backpressure based on total of partition lengths.', async () => { - // ingest until first 10 are seen - await ingester.start() - while ((ingester.sub.lastSeq ?? 0) < 10) { - await wait(50) - } - // allow additional time to pass to ensure no additional events are being consumed - await wait(200) - // check that max items has been respected (i.e. backpressure was applied) - const lengths = await ingester.ctx.redis.streamLengths(['repo:0', 'repo:1']) - expect(lengths).toHaveLength(2) - expect(lengths[0] + lengths[1]).toBeLessThanOrEqual(10 + 5) // not exact due to batching, may catch on following check backpressure - // drain all items using indexers, releasing backpressure - await indexers.start() - await processAll(network, ingester) - const lengthsFinal = await ingester.ctx.redis.streamLengths([ - 'repo:0', - 'repo:1', - ]) - expect(lengthsFinal).toHaveLength(2) - expect(lengthsFinal[0] + lengthsFinal[1]).toEqual(0) - await indexers.destroy() - await ingester.destroy() - }) -}) diff --git a/packages/bsky/tests/pipeline/reingest.test.ts b/packages/bsky/tests/pipeline/reingest.test.ts deleted file mode 100644 index 3c860bcf680..00000000000 --- a/packages/bsky/tests/pipeline/reingest.test.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { - TestNetworkNoAppView, - SeedClient, - getIngester, - ingestAll, -} from '@atproto/dev-env' -import basicSeed from '../seeds/basic' -import { BskyIngester } from '../../src' - -const TEST_NAME = 'pipeline_reingest' - 
-describe('pipeline reingestion', () => { - let network: TestNetworkNoAppView - let ingester: BskyIngester - let sc: SeedClient - - beforeAll(async () => { - network = await TestNetworkNoAppView.create({ - dbPostgresSchema: TEST_NAME, - }) - ingester = await getIngester(network, { - name: TEST_NAME, - ingesterPartitionCount: 1, - }) - sc = network.getSeedClient() - await basicSeed(sc) - }) - - afterAll(async () => { - await network.close() - await ingester.destroy() - }) - - it('allows events to be reingested multiple times.', async () => { - // ingest all events once - await ingester.start() - await ingestAll(network, ingester) - const initialCursor = await ingester.sub.getCursor() - const [initialLen] = await ingester.ctx.redis.streamLengths(['repo:0']) - expect(initialCursor).toBeGreaterThan(10) - expect(initialLen).toBeGreaterThan(10) - // stop ingesting and reset ingester state - await ingester.sub.destroy() - await ingester.sub.resetCursor() - // add one new event and reingest - await sc.post(sc.dids.alice, 'one more event!') // add one event to firehose - ingester.sub.resume() - await ingestAll(network, ingester) - // confirm the newest event was ingested - const finalCursor = await ingester.sub.getCursor() - const [finalLen] = await ingester.ctx.redis.streamLengths(['repo:0']) - expect(finalCursor).toEqual(initialCursor + 1) - expect(finalLen).toEqual(initialLen + 1) - }) -}) diff --git a/packages/bsky/tests/pipeline/repartition.test.ts b/packages/bsky/tests/pipeline/repartition.test.ts deleted file mode 100644 index f228b954fb6..00000000000 --- a/packages/bsky/tests/pipeline/repartition.test.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { - BskyIndexers, - TestNetworkNoAppView, - SeedClient, - getIndexers, - getIngester, - ingestAll, - processAll, -} from '@atproto/dev-env' -import usersSeed from '../seeds/users' -import { BskyIngester } from '../../src' -import { countAll } from '../../src/db/util' - -const TEST_NAME = 'pipeline_repartition' - 
-describe('pipeline indexer repartitioning', () => { - let network: TestNetworkNoAppView - let ingester: BskyIngester - let indexers1: BskyIndexers - let indexers2: BskyIndexers - let sc: SeedClient - - beforeAll(async () => { - network = await TestNetworkNoAppView.create({ - dbPostgresSchema: TEST_NAME, - }) - ingester = await getIngester(network, { - name: TEST_NAME, - ingesterPartitionCount: 2, - }) - indexers1 = await getIndexers(network, { - name: TEST_NAME, - partitionIdsByIndexer: [[0, 1]], // one indexer consuming two partitions - }) - indexers2 = await getIndexers(network, { - name: TEST_NAME, - partitionIdsByIndexer: [[0], [1]], // two indexers, each consuming one partition - }) - sc = network.getSeedClient() - await usersSeed(sc) - }) - - afterAll(async () => { - await network.close() - }) - - it('indexers repartition without missing events.', async () => { - const poster = createPoster(sc) - await Promise.all([poster.post(4), indexers1.start(), ingester.start()]) - await poster.post(1) - await processAll(network, ingester) - const { count: indexedPosts } = await indexers1.db.db - .selectFrom('post') - .select(countAll.as('count')) - .executeTakeFirstOrThrow() - expect(indexedPosts).toEqual(5) - await Promise.all([poster.post(3), indexers1.destroy()]) - await poster.post(3) // miss some events - await ingestAll(network, ingester) - await Promise.all([poster.post(3), indexers2.start()]) // handle some events on indexers2 - await processAll(network, ingester) - const { count: allIndexedPosts } = await indexers2.db.db - .selectFrom('post') - .select(countAll.as('count')) - .executeTakeFirstOrThrow() - expect(allIndexedPosts).toBeGreaterThan(indexedPosts) - expect(allIndexedPosts).toEqual(poster.postCount) - await indexers2.destroy() - await ingester.destroy() - }) -}) - -function createPoster(sc: SeedClient) { - return { - postCount: 0, - destroyed: false, - async post(n = 1) { - const dids = Object.values(sc.dids) - for (let i = 0; i < n; ++i) { - const 
did = dids[this.postCount % dids.length] - await sc.post(did, `post ${this.postCount}`) - this.postCount++ - } - }, - } -} diff --git a/packages/bsky/tests/reprocessing.test.ts b/packages/bsky/tests/reprocessing.test.ts deleted file mode 100644 index 046bc58076b..00000000000 --- a/packages/bsky/tests/reprocessing.test.ts +++ /dev/null @@ -1,71 +0,0 @@ -import axios from 'axios' -import { AtUri } from '@atproto/syntax' -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import basicSeed from './seeds/basic' -import { Database } from '../src/db' - -describe('reprocessing', () => { - let network: TestNetwork - let sc: SeedClient - let alice: string - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_reprocessing', - }) - sc = network.getSeedClient() - await basicSeed(sc) - alice = sc.dids.alice - await network.processAll() - }) - - afterAll(async () => { - await network.close() - }) - - const getRecordUris = async (db: Database, did: string) => { - const res = await db.db - .selectFrom('record') - .select('uri') - .where('did', '=', did) - .execute() - return res.map((row) => row.uri) - } - it('reprocesses repo data', async () => { - const db = network.bsky.ctx.db.getPrimary() - const urisBefore = await getRecordUris(db, alice) - await db.db.deleteFrom('record').where('did', '=', alice).execute() - const indexerPort = network.bsky.indexer.ctx.cfg.indexerPort - await axios.post(`http://localhost:${indexerPort}/reprocess/${alice}`) - await network.processAll() - const urisAfter = await getRecordUris(db, alice) - expect(urisAfter.sort()).toEqual(urisBefore.sort()) - }) - - it('buffers commits while reprocessing repo data', async () => { - const db = network.bsky.ctx.db.getPrimary() - const urisBefore = await getRecordUris(db, alice) - await db.db.deleteFrom('record').where('did', '=', alice).execute() - const indexerPort = network.bsky.indexer.ctx.cfg.indexerPort - const toDeleteIndex = urisBefore.findIndex((uri) => - 
uri.includes('app.bsky.feed.post'), - ) - if (toDeleteIndex < 0) { - throw new Error('could not find post to delete') - } - // request reprocess while buffering a new post & delete - const [newPost] = await Promise.all([ - sc.post(alice, 'blah blah'), - axios.post(`http://localhost:${indexerPort}/reprocess/${alice}`), - sc.deletePost(alice, new AtUri(urisBefore[toDeleteIndex])), - ]) - await network.processAll() - const urisAfter = await getRecordUris(db, alice) - const expected = [ - ...urisBefore.slice(0, toDeleteIndex), - ...urisBefore.slice(toDeleteIndex + 1), - newPost.ref.uriStr, - ] - expect(urisAfter.sort()).toEqual(expected.sort()) - }) -}) diff --git a/packages/bsky/tests/subscription/repo.test.ts b/packages/bsky/tests/subscription/repo.test.ts index 1c83e4c0cca..9d40af78c00 100644 --- a/packages/bsky/tests/subscription/repo.test.ts +++ b/packages/bsky/tests/subscription/repo.test.ts @@ -4,7 +4,7 @@ import { CommitData } from '@atproto/repo' import { PreparedWrite } from '@atproto/pds/src/repo' import * as sequencer from '@atproto/pds/src/sequencer' import { cborDecode, cborEncode } from '@atproto/common' -import { DatabaseSchemaType } from '../../src/db/database-schema' +import { DatabaseSchemaType } from '../../src/data-plane/server/db/database-schema' import { ids } from '../../src/lexicon/lexicons' import { forSnapshot } from '../_util' import { AppContext, Database } from '../../src' diff --git a/packages/bsky/tests/subscription/util.test.ts b/packages/bsky/tests/subscription/util.test.ts index 497532f643b..d0babb5c3dc 100644 --- a/packages/bsky/tests/subscription/util.test.ts +++ b/packages/bsky/tests/subscription/util.test.ts @@ -3,7 +3,7 @@ import { ConsecutiveList, LatestQueue, PartitionedQueue, -} from '../../src/subscription/util' +} from '../../src/data-plane/server/subscription/util' import { randomStr } from '../../../crypto/src' describe('subscription utils', () => { From 975ba4db14c8da4c1932c62a2d9245f62e9ccf95 Mon Sep 17 00:00:00 2001 
From: Devin Ivy Date: Wed, 20 Dec 2023 23:33:06 -0500 Subject: [PATCH 05/17] add basic in-mem repo subscription to dataplane mock --- .../data-plane/server/subscription/index.ts | 324 ++++++++++++++++++ 1 file changed, 324 insertions(+) create mode 100644 packages/bsky/src/data-plane/server/subscription/index.ts diff --git a/packages/bsky/src/data-plane/server/subscription/index.ts b/packages/bsky/src/data-plane/server/subscription/index.ts new file mode 100644 index 00000000000..2d8efc2d604 --- /dev/null +++ b/packages/bsky/src/data-plane/server/subscription/index.ts @@ -0,0 +1,324 @@ +import assert from 'node:assert' +import { CID } from 'multiformats/cid' +import { AtUri } from '@atproto/syntax' +import { Subscription } from '@atproto/xrpc-server' +import { cborDecode, handleAllSettledErrors } from '@atproto/common' +import { ValidationError } from '@atproto/lexicon' +import { IdResolver } from '@atproto/identity' +import { + WriteOpAction, + readCarWithRoot, + cborToLexRecord, + def, + Commit, +} from '@atproto/repo' +import { ids, lexicons } from '../../../lexicon/lexicons' +import { OutputSchema as Message } from '../../../lexicon/types/com/atproto/sync/subscribeRepos' +import * as message from '../../../lexicon/types/com/atproto/sync/subscribeRepos' +import { subLogger as log } from '../../../logger' +import { IndexingService } from '../indexing' +import { PrimaryDatabase } from '../db' +import { + ConsecutiveItem, + ConsecutiveList, + PartitionedQueue, + ProcessableMessage, + loggableMessage, +} from './util' + +export class RepoSubscription { + ac = new AbortController() + running: Promise | undefined + cursor = 0 + repoQueue = new PartitionedQueue({ concurrency: Infinity }) + consecutive = new ConsecutiveList() + indexingSvc: IndexingService + + constructor( + private opts: { + service: string + db: PrimaryDatabase + idResolver: IdResolver + }, + ) { + this.indexingSvc = new IndexingService(this.opts.db, this.opts.idResolver) + } + + run() { + if 
(this.running) return + this.ac = new AbortController() + this.repoQueue = new PartitionedQueue({ concurrency: Infinity }) + this.consecutive = new ConsecutiveList() + this.running = this.process() + .catch((err) => { + // allow this to cause an unhandled rejection, let deployment handle the crash. + log.error({ err }, 'subscription crashed') + throw err + }) + .finally(() => (this.running = undefined)) + } + + private async process() { + const sub = this.getSubscription() + for await (const msg of sub) { + const details = getMessageDetails(msg) + if ('info' in details) { + // These messages are not sequenced, we just log them and carry on + log.warn( + { provider: this.opts.service, message: loggableMessage(msg) }, + `sub ${details.info ? 'info' : 'unknown'} message`, + ) + continue + } + const item = this.consecutive.push(details.seq) + this.repoQueue.add(details.repo, async () => { + await this.handleMessage(item, details) + }) + await this.repoQueue.main.onEmpty() // backpressure + } + } + + private async handleMessage( + item: ConsecutiveItem, + envelope: Envelope, + ) { + const msg = envelope.message + try { + if (message.isCommit(msg)) { + await this.handleCommit(msg) + } else if (message.isHandle(msg)) { + await this.handleUpdateHandle(msg) + } else if (message.isTombstone(msg)) { + await this.handleTombstone(msg) + } else if (message.isMigrate(msg)) { + // Ignore migrations + } else { + const exhaustiveCheck: never = msg + throw new Error(`Unhandled message type: ${exhaustiveCheck['$type']}`) + } + } catch (err) { + // We log messages we can't process and move on: + // otherwise the cursor would get stuck on a poison message. 
+ log.error( + { err, message: loggableMessage(msg) }, + 'indexer message processing error', + ) + } finally { + const latest = item.complete().at(-1) + if (latest !== undefined) { + this.cursor = latest + } + } + } + + private async handleCommit(msg: message.Commit) { + const indexRecords = async () => { + const { root, rootCid, ops } = await getOps(msg) + if (msg.tooBig) { + await this.indexingSvc.indexRepo(msg.repo, rootCid.toString()) + await this.indexingSvc.setCommitLastSeen(root, msg) + return + } + if (msg.rebase) { + const needsReindex = await this.indexingSvc.checkCommitNeedsIndexing( + root, + ) + if (needsReindex) { + await this.indexingSvc.indexRepo(msg.repo, rootCid.toString()) + } + await this.indexingSvc.setCommitLastSeen(root, msg) + return + } + for (const op of ops) { + if (op.action === WriteOpAction.Delete) { + await this.indexingSvc.deleteRecord(op.uri) + } else { + try { + await this.indexingSvc.indexRecord( + op.uri, + op.cid, + op.record, + op.action, // create or update + msg.time, + ) + } catch (err) { + if (err instanceof ValidationError) { + log.warn( + { + did: msg.repo, + commit: msg.commit.toString(), + uri: op.uri.toString(), + cid: op.cid.toString(), + }, + 'skipping indexing of invalid record', + ) + } else { + log.error( + { + err, + did: msg.repo, + commit: msg.commit.toString(), + uri: op.uri.toString(), + cid: op.cid.toString(), + }, + 'skipping indexing due to error processing record', + ) + } + } + } + } + await this.indexingSvc.setCommitLastSeen(root, msg) + } + const results = await Promise.allSettled([ + indexRecords(), + this.indexingSvc.indexHandle(msg.repo, msg.time), + ]) + handleAllSettledErrors(results) + } + + private async handleUpdateHandle(msg: message.Handle) { + await this.indexingSvc.indexHandle(msg.did, msg.time, true) + } + + private async handleTombstone(msg: message.Tombstone) { + await this.indexingSvc.tombstoneActor(msg.did) + } + + private getSubscription() { + return new Subscription({ + service: 
this.opts.service, + method: ids.ComAtprotoSyncSubscribeRepos, + signal: this.ac.signal, + getParams: async () => { + return { cursor: this.cursor } + }, + onReconnectError: (err, reconnects, initial) => { + log.warn({ err, reconnects, initial }, 'sub reconnect') + }, + validate: (value) => { + try { + return lexicons.assertValidXrpcMessage( + ids.ComAtprotoSyncSubscribeRepos, + value, + ) + } catch (err) { + log.warn( + { + err, + seq: ifNumber(value?.['seq']), + repo: ifString(value?.['repo']), + commit: ifString(value?.['commit']?.toString()), + time: ifString(value?.['time']), + provider: this.opts.service, + }, + 'ingester sub skipped invalid message', + ) + } + }, + }) + } + + async destroy() { + this.ac.abort() + await this.running + await this.repoQueue.destroy() + } +} + +type Envelope = { + repo: string + message: ProcessableMessage +} + +function ifString(val: unknown): string | undefined { + return typeof val === 'string' ? val : undefined +} + +function ifNumber(val: unknown): number | undefined { + return typeof val === 'number' ? 
val : undefined +} + +function getMessageDetails(msg: Message): + | { info: message.Info | null } + | { + seq: number + repo: string + message: ProcessableMessage + } { + if (message.isCommit(msg)) { + return { seq: msg.seq, repo: msg.repo, message: msg } + } else if (message.isHandle(msg)) { + return { seq: msg.seq, repo: msg.did, message: msg } + } else if (message.isMigrate(msg)) { + return { seq: msg.seq, repo: msg.did, message: msg } + } else if (message.isTombstone(msg)) { + return { seq: msg.seq, repo: msg.did, message: msg } + } else if (message.isInfo(msg)) { + return { info: msg } + } + return { info: null } +} + +async function getOps( + msg: message.Commit, +): Promise<{ root: Commit; rootCid: CID; ops: PreparedWrite[] }> { + const car = await readCarWithRoot(msg.blocks as Uint8Array) + const rootBytes = car.blocks.get(car.root) + assert(rootBytes, 'Missing commit block in car slice') + + const root = def.commit.schema.parse(cborDecode(rootBytes)) + const ops: PreparedWrite[] = msg.ops.map((op) => { + const [collection, rkey] = op.path.split('/') + assert(collection && rkey) + if ( + op.action === WriteOpAction.Create || + op.action === WriteOpAction.Update + ) { + assert(op.cid) + const record = car.blocks.get(op.cid) + assert(record) + return { + action: + op.action === WriteOpAction.Create + ? 
WriteOpAction.Create + : WriteOpAction.Update, + cid: op.cid, + record: cborToLexRecord(record), + blobs: [], + uri: AtUri.make(msg.repo, collection, rkey), + } + } else if (op.action === WriteOpAction.Delete) { + return { + action: WriteOpAction.Delete, + uri: AtUri.make(msg.repo, collection, rkey), + } + } else { + throw new Error(`Unknown repo op action: ${op.action}`) + } + }) + + return { root, rootCid: car.root, ops } +} + +type PreparedCreate = { + action: WriteOpAction.Create + uri: AtUri + cid: CID + record: Record + blobs: CID[] // differs from similar type in pds +} + +type PreparedUpdate = { + action: WriteOpAction.Update + uri: AtUri + cid: CID + record: Record + blobs: CID[] // differs from similar type in pds +} + +type PreparedDelete = { + action: WriteOpAction.Delete + uri: AtUri +} + +type PreparedWrite = PreparedCreate | PreparedUpdate | PreparedDelete From a48903d6e35785cb4ba59ed5d1c5bb062057b364 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 13:39:34 -0500 Subject: [PATCH 06/17] fix dev-env, bsky tests, bsky build --- packages/bsky/build.js | 2 +- .../bsky/src/api/app/bsky/feed/getFeed.ts | 2 +- .../bsky/src/api/app/bsky/graph/muteActor.ts | 5 +- .../src/api/app/bsky/graph/unmuteActor.ts | 3 + .../api/app/bsky/notification/registerPush.ts | 20 +- packages/bsky/src/context.ts | 7 +- .../bsky/src/data-plane/server/background.ts | 35 +++ .../data-plane/server/db/tables/moderation.ts | 2 +- packages/bsky/src/data-plane/server/index.ts | 3 + .../src/data-plane/server/indexing/index.ts | 35 +-- .../server/indexing/plugins/block.ts | 8 +- .../server/indexing/plugins/feed-generator.ts | 8 +- .../server/indexing/plugins/follow.ts | 8 +- .../server/indexing/plugins/like.ts | 8 +- .../server/indexing/plugins/list-block.ts | 8 +- .../server/indexing/plugins/list-item.ts | 8 +- .../server/indexing/plugins/list.ts | 8 +- .../server/indexing/plugins/post.ts | 8 +- .../server/indexing/plugins/profile.ts | 8 +- 
.../server/indexing/plugins/repost.ts | 8 +- .../server/indexing/plugins/thread-gate.ts | 8 +- .../data-plane/server/indexing/processor.ts | 19 +- .../data-plane/server/routes/moderation.ts | 87 +++++- .../src/data-plane/server/routes/mutes.ts | 2 +- .../data-plane/server/subscription/index.ts | 21 +- packages/bsky/src/index.ts | 1 + .../tests/__snapshots__/indexing.test.ts.snap | 30 +- packages/bsky/tests/blob-resolver.test.ts | 1 - packages/bsky/tests/db.test.ts | 83 +----- packages/bsky/tests/did-cache.test.ts | 14 +- packages/bsky/tests/duplicate-records.test.ts | 50 ++-- packages/bsky/tests/feed-generation.test.ts | 33 +-- .../bsky/tests/handle-invalidation.test.ts | 8 +- packages/bsky/tests/image/server.test.ts | 1 - packages/bsky/tests/indexing.test.ts | 116 ++++---- packages/bsky/tests/server.test.ts | 18 +- packages/bsky/tests/subscription/repo.test.ts | 24 +- .../__snapshots__/author-feed.test.ts.snap | 98 +------ .../__snapshots__/block-lists.test.ts.snap | 11 +- .../views/__snapshots__/blocks.test.ts.snap | 30 +- .../__snapshots__/list-feed.test.ts.snap | 49 +--- .../__snapshots__/mute-lists.test.ts.snap | 19 +- .../views/__snapshots__/mutes.test.ts.snap | 19 +- .../__snapshots__/notifications.test.ts.snap | 49 +--- .../views/__snapshots__/thread.test.ts.snap | 95 +------ .../views/__snapshots__/timeline.test.ts.snap | 207 +------------- .../bsky/tests/views/actor-search.test.ts | 26 +- .../tests/views/admin/repo-search.test.ts | 133 --------- packages/bsky/tests/views/author-feed.test.ts | 78 ++---- packages/bsky/tests/views/follows.test.ts | 76 ++--- packages/bsky/tests/views/list-feed.test.ts | 78 ++---- packages/bsky/tests/views/mute-lists.test.ts | 1 - packages/bsky/tests/views/mutes.test.ts | 2 +- .../bsky/tests/views/notifications.test.ts | 43 +-- packages/bsky/tests/views/profile.test.ts | 39 +-- .../tests/views/suggested-follows.test.ts | 3 +- packages/bsky/tests/views/suggestions.test.ts | 3 +- packages/bsky/tests/views/thread.test.ts | 238 
++++------------ packages/bsky/tests/views/timeline.test.ts | 144 ++++------ packages/dev-env/src/bsky.ts | 260 ++---------------- packages/dev-env/src/network.ts | 10 +- packages/dev-env/src/types.ts | 6 +- packages/dev-env/src/util.ts | 1 - 63 files changed, 623 insertions(+), 1805 deletions(-) create mode 100644 packages/bsky/src/data-plane/server/background.ts delete mode 100644 packages/bsky/tests/views/admin/repo-search.test.ts diff --git a/packages/bsky/build.js b/packages/bsky/build.js index 3822d9bc98f..85c4a88243b 100644 --- a/packages/bsky/build.js +++ b/packages/bsky/build.js @@ -5,7 +5,7 @@ const buildShallow = require('esbuild').build({ logLevel: 'info', - entryPoints: ['src/index.ts', 'src/db/index.ts'], + entryPoints: ['src/index.ts'], bundle: true, sourcemap: true, outdir: 'dist', diff --git a/packages/bsky/src/api/app/bsky/feed/getFeed.ts b/packages/bsky/src/api/app/bsky/feed/getFeed.ts index 309e4400727..569dc99636a 100644 --- a/packages/bsky/src/api/app/bsky/feed/getFeed.ts +++ b/packages/bsky/src/api/app/bsky/feed/getFeed.ts @@ -15,7 +15,7 @@ import { QueryParams as GetFeedParams } from '../../../../lexicon/types/app/bsky import { OutputSchema as SkeletonOutput } from '../../../../lexicon/types/app/bsky/feed/getFeedSkeleton' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { AlgoResponse, AlgoResponseItem } from '../../../../feed-gen/types' +import { AlgoResponse, AlgoResponseItem } from '../../../feed-gen/types' import { HydrationFnInput, PresentationFnInput, diff --git a/packages/bsky/src/api/app/bsky/graph/muteActor.ts b/packages/bsky/src/api/app/bsky/graph/muteActor.ts index be205f9f9b0..a4918124f6d 100644 --- a/packages/bsky/src/api/app/bsky/graph/muteActor.ts +++ b/packages/bsky/src/api/app/bsky/graph/muteActor.ts @@ -1,3 +1,4 @@ +import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -7,7 +8,9 
@@ export default function (server: Server, ctx: AppContext) { handler: async ({ auth, input }) => { const { actor } = input.body const viewer = auth.credentials.did - await ctx.dataplane.muteActor({ actorDid: viewer, subjectDid: actor }) + const [did] = await ctx.hydrator.actor.getDids([actor]) + if (!did) throw new InvalidRequestError('Actor not found') + await ctx.dataplane.muteActor({ actorDid: viewer, subjectDid: did }) }, }) } diff --git a/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts b/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts index 80da8fd844d..5b69f13ffb7 100644 --- a/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts +++ b/packages/bsky/src/api/app/bsky/graph/unmuteActor.ts @@ -1,3 +1,4 @@ +import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -7,6 +8,8 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ auth, input }) => { const { actor } = input.body const viewer = auth.credentials.did + const [did] = await ctx.hydrator.actor.getDids([actor]) + if (!did) throw new InvalidRequestError('Actor not found') - await ctx.dataplane.unmuteActor({ actorDid: viewer, subjectDid: actor }) + await ctx.dataplane.unmuteActor({ actorDid: viewer, subjectDid: did }) }, }) diff --git a/packages/bsky/src/api/app/bsky/notification/registerPush.ts b/packages/bsky/src/api/app/bsky/notification/registerPush.ts index be7d373bcd4..b0c17642a48 100644 --- a/packages/bsky/src/api/app/bsky/notification/registerPush.ts +++ b/packages/bsky/src/api/app/bsky/notification/registerPush.ts @@ -1,31 +1,17 @@ import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { Platform } from '../../../../notifications' export default function (server: Server, ctx: AppContext) { server.app.bsky.notification.registerPush({ auth: ctx.authVerifier, handler: async ({ auth, input }) => { - const { token, platform, serviceDid, appId } =
input.body - const { - credentials: { did }, - } = auth + const { serviceDid } = input.body if (serviceDid !== auth.artifacts.aud) { throw new InvalidRequestError('Invalid serviceDid.') } - const { notifServer } = ctx - if (platform !== 'ios' && platform !== 'android' && platform !== 'web') { - throw new InvalidRequestError( - 'Unsupported platform: must be "ios", "android", or "web".', - ) - } - await notifServer.registerDeviceForPushNotifications( - did, - token, - platform as Platform, - appId, - ) + // @TODO fix pending appview v2 buildout + throw new InvalidRequestError('not currently supported') }, }) } diff --git a/packages/bsky/src/context.ts b/packages/bsky/src/context.ts index bcad63760c1..fa372725724 100644 --- a/packages/bsky/src/context.ts +++ b/packages/bsky/src/context.ts @@ -1,5 +1,5 @@ import * as plc from '@did-plc/lib' -import { IdResolver } from '@atproto/identity' +import { DidCache, IdResolver } from '@atproto/identity' import { AtpAgent } from '@atproto/api' import { Keypair } from '@atproto/crypto' import { createServiceJwt } from '@atproto/xrpc-server' @@ -20,6 +20,7 @@ export class AppContext { views: Views signingKey: Keypair idResolver: IdResolver + didCache?: DidCache algos: MountedAlgos }, ) {} @@ -52,6 +53,10 @@ export class AppContext { return this.opts.idResolver } + get didCache(): DidCache | undefined { + return this.opts.didCache + } + get authVerifier() { return auth.authVerifier(this.idResolver, { aud: this.cfg.serverDid }) } diff --git a/packages/bsky/src/data-plane/server/background.ts b/packages/bsky/src/data-plane/server/background.ts new file mode 100644 index 00000000000..0ce785b21ce --- /dev/null +++ b/packages/bsky/src/data-plane/server/background.ts @@ -0,0 +1,35 @@ +import PQueue from 'p-queue' +import { PrimaryDatabase } from './db' +import { dbLogger } from '../../logger' + +// A simple queue for in-process, out-of-band/backgrounded work + +export class BackgroundQueue { + queue = new PQueue() + destroyed = false + 
constructor(public db: PrimaryDatabase) {} + + add(task: Task) { + if (this.destroyed) { + return + } + this.queue + .add(() => task(this.db)) + .catch((err) => { + dbLogger.error(err, 'background queue task failed') + }) + } + + async processAll() { + await this.queue.onIdle() + } + + // On destroy we stop accepting new tasks, but complete all pending/in-progress tasks. + // The application calls this only once http connections have drained (tasks no longer being added). + async destroy() { + this.destroyed = true + await this.queue.onIdle() + } +} + +type Task = (db: PrimaryDatabase) => Promise diff --git a/packages/bsky/src/data-plane/server/db/tables/moderation.ts b/packages/bsky/src/data-plane/server/db/tables/moderation.ts index f1ac3572785..c483ae20a4c 100644 --- a/packages/bsky/src/data-plane/server/db/tables/moderation.ts +++ b/packages/bsky/src/data-plane/server/db/tables/moderation.ts @@ -3,7 +3,7 @@ import { REVIEWCLOSED, REVIEWOPEN, REVIEWESCALATED, -} from '../../lexicon/types/com/atproto/admin/defs' +} from '../../../../lexicon/types/com/atproto/admin/defs' export const eventTableName = 'moderation_event' export const subjectStatusTableName = 'moderation_subject_status' diff --git a/packages/bsky/src/data-plane/server/index.ts b/packages/bsky/src/data-plane/server/index.ts index 9e37667cca8..a488b045d0d 100644 --- a/packages/bsky/src/data-plane/server/index.ts +++ b/packages/bsky/src/data-plane/server/index.ts @@ -5,6 +5,9 @@ import { expressConnectMiddleware } from '@connectrpc/connect-express' import createRoutes from './routes' import { Database } from './db' +export { DidSqlCache } from './did-cache' +export { RepoSubscription } from './subscription' + export class DataPlaneServer { constructor(public server: http.Server) {} diff --git a/packages/bsky/src/data-plane/server/indexing/index.ts b/packages/bsky/src/data-plane/server/indexing/index.ts index e18f01bdb2a..69fc73dc0e6 100644 --- a/packages/bsky/src/data-plane/server/indexing/index.ts +++ 
b/packages/bsky/src/data-plane/server/indexing/index.ts @@ -29,6 +29,7 @@ import * as FeedGenerator from './plugins/feed-generator' import RecordProcessor from './processor' import { subLogger } from '../../../logger' import { retryHttp } from '../../../util/retry' +import { BackgroundQueue } from '../background' export class IndexingService { records: { @@ -45,29 +46,29 @@ export class IndexingService { feedGenerator: FeedGenerator.PluginType } - constructor(public db: PrimaryDatabase, public idResolver: IdResolver) { + constructor( + public db: PrimaryDatabase, + public idResolver: IdResolver, + public background: BackgroundQueue, + ) { this.records = { - post: Post.makePlugin(this.db), - threadGate: Threadgate.makePlugin(this.db), - like: Like.makePlugin(this.db), - repost: Repost.makePlugin(this.db), - follow: Follow.makePlugin(this.db), - profile: Profile.makePlugin(this.db), - list: List.makePlugin(this.db), - listItem: ListItem.makePlugin(this.db), - listBlock: ListBlock.makePlugin(this.db), - block: Block.makePlugin(this.db), - feedGenerator: FeedGenerator.makePlugin(this.db), + post: Post.makePlugin(this.db, this.background), + threadGate: Threadgate.makePlugin(this.db, this.background), + like: Like.makePlugin(this.db, this.background), + repost: Repost.makePlugin(this.db, this.background), + follow: Follow.makePlugin(this.db, this.background), + profile: Profile.makePlugin(this.db, this.background), + list: List.makePlugin(this.db, this.background), + listItem: ListItem.makePlugin(this.db, this.background), + listBlock: ListBlock.makePlugin(this.db, this.background), + block: Block.makePlugin(this.db, this.background), + feedGenerator: FeedGenerator.makePlugin(this.db, this.background), } } transact(txn: PrimaryDatabase) { txn.assertTransaction() - return new IndexingService(txn, this.idResolver) - } - - static creator(idResolver: IdResolver) { - return (db: PrimaryDatabase) => new IndexingService(db, idResolver) + return new IndexingService(txn, 
this.idResolver, this.background) } async indexRecord( diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts index 495c5be6c93..feb94b3256f 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts @@ -6,6 +6,7 @@ import * as lex from '../../../../lexicon/lexicons' import { PrimaryDatabase } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphBlock type IndexedBlock = Selectable @@ -69,8 +70,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts index f2876eef053..7af296fb26f 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts @@ -6,6 +6,7 @@ import * as lex from '../../../../lexicon/lexicons' import { PrimaryDatabase } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyFeedGenerator type IndexedFeedGenerator = Selectable @@ -68,8 +69,11 @@ export type PluginType = RecordProcessor< IndexedFeedGenerator > -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { 
+export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts index dfdfbdb9631..f8f10069191 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts @@ -7,6 +7,7 @@ import RecordProcessor from '../processor' import { PrimaryDatabase } from '../../db' import { countAll, excluded } from '../../db/util' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphFollow type IndexedFollow = Selectable @@ -116,8 +117,11 @@ const updateAggregates = async (db: DatabaseSchema, follow: IndexedFollow) => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/like.ts b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts index 431d76a8656..849e0ed5bbb 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/like.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts @@ -7,6 +7,7 @@ import RecordProcessor from '../processor' import { countAll, excluded } from '../../db/util' import { PrimaryDatabase } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyFeedLike type IndexedLike = Selectable @@ -106,8 +107,11 @@ const updateAggregates = async (db: 
DatabaseSchema, like: IndexedLike) => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts index 3de8889808f..da6d1481553 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts @@ -6,6 +6,7 @@ import * as lex from '../../../../lexicon/lexicons' import { PrimaryDatabase } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphListblock type IndexedListBlock = Selectable @@ -69,8 +70,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts index 778fa599b62..37b987f8f18 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts @@ -7,6 +7,7 @@ import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' import { PrimaryDatabase } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' 
+import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphListitem type IndexedListItem = Selectable @@ -77,8 +78,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts index 09b04834293..52dc67e4e7d 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts @@ -6,6 +6,7 @@ import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { PrimaryDatabase } from '../../db' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphList type IndexedList = Selectable @@ -65,8 +66,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/post.ts b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts index bb162f139b3..1ed538a55b8 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/post.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts @@ -28,6 +28,7 @@ import { violatesThreadGate as checkViolatesThreadGate, postToThreadgateUri, } from '../../util' +import { BackgroundQueue } from 
'../../background' type Notif = Insertable type Post = Selectable @@ -393,8 +394,11 @@ const updateAggregates = async (db: DatabaseSchema, postIdx: IndexedPost) => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts index 9ea12135e30..1a4f3804f55 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts @@ -5,6 +5,7 @@ import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { PrimaryDatabase } from '../../db' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyActorProfile type IndexedProfile = DatabaseSchemaType['profile'] @@ -61,8 +62,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts index f31f537daf3..8e6c794fcec 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts @@ -7,6 +7,7 @@ import RecordProcessor from '../processor' import { PrimaryDatabase } from '../../db' import { 
countAll, excluded } from '../../db/util' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyFeedRepost type IndexedRepost = Selectable @@ -131,8 +132,11 @@ const updateAggregates = async (db: DatabaseSchema, repost: IndexedRepost) => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts index 80596a05080..c306602f973 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts @@ -6,6 +6,7 @@ import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import { PrimaryDatabase } from '../../db' import RecordProcessor from '../processor' +import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyFeedThreadgate type IndexedGate = DatabaseSchemaType['thread_gate'] @@ -74,8 +75,11 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor -export const makePlugin = (db: PrimaryDatabase): PluginType => { - return new RecordProcessor(db, { +export const makePlugin = ( + db: PrimaryDatabase, + background: BackgroundQueue, +): PluginType => { + return new RecordProcessor(db, background, { lexId, insertFn, findDuplicate, diff --git a/packages/bsky/src/data-plane/server/indexing/processor.ts b/packages/bsky/src/data-plane/server/indexing/processor.ts index 1dc540444d8..341dd1ccb0a 100644 --- a/packages/bsky/src/data-plane/server/indexing/processor.ts +++ 
b/packages/bsky/src/data-plane/server/indexing/processor.ts @@ -7,6 +7,7 @@ import { lexicons } from '../../../lexicon/lexicons' import { PrimaryDatabase } from '../db' import DatabaseSchema from '../db/database-schema' import { Notification } from '../db/tables/notification' +import { BackgroundQueue } from '../background' // @NOTE re: insertions and deletions. Due to how record updates are handled, // (insertFn) should have the same effect as (insertFn -> deleteFn -> insertFn). @@ -40,6 +41,7 @@ export class RecordProcessor { db: DatabaseSchema constructor( private appDb: PrimaryDatabase, + private background: BackgroundQueue, private params: RecordProcessorParams, ) { this.db = appDb.db @@ -86,7 +88,7 @@ export class RecordProcessor { timestamp, ) if (inserted) { - await this.aggregateOnCommit(inserted) + this.aggregateOnCommit(inserted) if (!opts?.disableNotifs) { await this.handleNotifs({ inserted }) } @@ -153,7 +155,7 @@ export class RecordProcessor { // If a record was updated but hadn't been indexed yet, treat it like a plain insert. 
return this.insertRecord(uri, cid, obj, timestamp) } - await this.aggregateOnCommit(deleted) + this.aggregateOnCommit(deleted) const inserted = await this.params.insertFn( this.db, uri, @@ -166,7 +168,7 @@ export class RecordProcessor { 'Record update failed: removed from index but could not be replaced', ) } - await this.aggregateOnCommit(inserted) + this.aggregateOnCommit(inserted) if (!opts?.disableNotifs) { await this.handleNotifs({ inserted, deleted }) } @@ -183,7 +185,7 @@ export class RecordProcessor { .execute() const deleted = await this.params.deleteFn(this.db, uri) if (!deleted) return - await this.aggregateOnCommit(deleted) + this.aggregateOnCommit(deleted) if (cascading) { await this.db .deleteFrom('duplicate_record') @@ -215,7 +217,7 @@ export class RecordProcessor { found.indexedAt, ) if (inserted) { - await this.aggregateOnCommit(inserted) + this.aggregateOnCommit(inserted) } await this.handleNotifs({ deleted, inserted: inserted ?? undefined }) } @@ -254,9 +256,12 @@ export class RecordProcessor { } } - async aggregateOnCommit(indexed: S) { + aggregateOnCommit(indexed: S) { const { updateAggregates } = this.params - await updateAggregates?.(this.db, indexed) + if (!updateAggregates) return + this.appDb.onCommit(() => { + this.background.add((db) => updateAggregates(db.db, indexed)) + }) } } diff --git a/packages/bsky/src/data-plane/server/routes/moderation.ts b/packages/bsky/src/data-plane/server/routes/moderation.ts index 64a3c343e9f..dd230b09a77 100644 --- a/packages/bsky/src/data-plane/server/routes/moderation.ts +++ b/packages/bsky/src/data-plane/server/routes/moderation.ts @@ -2,6 +2,7 @@ import { sql } from 'kysely' import { ServiceImpl } from '@connectrpc/connect' import { Service } from '../../gen/bsky_connect' import { Database } from '../db' +import { didFromUri } from '../../../hydration/util' export default (db: Database): Partial> => ({ async getBlobTakedown(req) { @@ -17,7 +18,89 @@ export default (db: Database): Partial> => ({ } }, - 
async updateTakedown(_req) { - throw new Error('unimplemented') + async updateTakedown(req) { + const { actorDid, recordUri, blobCid, takenDown } = req + const now = new Date() + if (actorDid && !blobCid) { + if (takenDown) { + const { id } = await db.db + .insertInto('moderation_event') + .values({ + action: 'com.atproto.admin.defs#modEventTakedown', + subjectDid: actorDid, + subjectType: 'com.atproto.admin.defs#repoRef', + createdAt: now.toISOString(), + createdBy: 'admin', + }) + .returning('id') + .executeTakeFirstOrThrow() + await db.db + .updateTable('actor') + .set({ takedownId: id }) + .where('did', '=', actorDid) + .execute() + } else { + await db.db + .updateTable('actor') + .set({ takedownId: null }) + .where('did', '=', actorDid) + .execute() + } + } + + if (actorDid && blobCid) { + if (takenDown) { + await db.db + .insertInto('moderation_subject_status') + .values({ + did: actorDid, + blobCids: [blobCid], + recordPath: '', + takendown: true, + createdAt: now.toISOString(), + updatedAt: now.toISOString(), + reviewState: 'com.atproto.admin.defs#reviewOpen', + }) + .execute() + } else { + await db.db + .deleteFrom('moderation_subject_status') + .where('did', '=', actorDid) + .where( + 'blobCids', + '@>', + sql`CAST(${JSON.stringify([blobCid])} AS JSONB)`, + ) + .executeTakeFirst() + } + } + + if (recordUri) { + if (takenDown) { + const { id } = await db.db + .insertInto('moderation_event') + .values({ + action: 'com.atproto.admin.defs#modEventTakedown', + subjectDid: didFromUri(recordUri), + subjectUri: recordUri, + subjectType: 'com.atproto.repo.strongRef', + createdAt: now.toISOString(), + createdBy: 'admin', + }) + .returning('id') + .executeTakeFirstOrThrow() + await db.db + .updateTable('record') + .set({ takedownId: id }) + .where('uri', '=', recordUri) + .execute() + } else { + await db.db + .updateTable('record') + .set({ takedownId: null }) + .where('uri', '=', recordUri) + .execute() + } + } }, }) diff --git 
a/packages/bsky/src/data-plane/server/routes/mutes.ts b/packages/bsky/src/data-plane/server/routes/mutes.ts index 8cb7736b85d..5d057656f2b 100644 --- a/packages/bsky/src/data-plane/server/routes/mutes.ts +++ b/packages/bsky/src/data-plane/server/routes/mutes.ts @@ -108,7 +108,7 @@ export default (db: Database): Partial> => ({ async muteActor(req) { const { actorDid, subjectDid } = req - assert(actorDid !== subjectDid, 'cannot mute yourself') + assert(actorDid !== subjectDid, 'cannot mute yourself') // @TODO pass message through in http error await db.db .insertInto('mute') .values({ diff --git a/packages/bsky/src/data-plane/server/subscription/index.ts b/packages/bsky/src/data-plane/server/subscription/index.ts index 2d8efc2d604..5054441b6b9 100644 --- a/packages/bsky/src/data-plane/server/subscription/index.ts +++ b/packages/bsky/src/data-plane/server/subscription/index.ts @@ -25,13 +25,16 @@ import { ProcessableMessage, loggableMessage, } from './util' +import { BackgroundQueue } from '../background' export class RepoSubscription { ac = new AbortController() running: Promise | undefined cursor = 0 + seenSeq: number | null = null repoQueue = new PartitionedQueue({ concurrency: Infinity }) consecutive = new ConsecutiveList() + background: BackgroundQueue indexingSvc: IndexingService constructor( @@ -39,9 +42,15 @@ export class RepoSubscription { service: string db: PrimaryDatabase idResolver: IdResolver + background: BackgroundQueue }, ) { - this.indexingSvc = new IndexingService(this.opts.db, this.opts.idResolver) + this.background = new BackgroundQueue(this.opts.db) + this.indexingSvc = new IndexingService( + this.opts.db, + this.opts.idResolver, + this.background, + ) } run() { @@ -51,9 +60,11 @@ export class RepoSubscription { this.consecutive = new ConsecutiveList() this.running = this.process() .catch((err) => { - // allow this to cause an unhandled rejection, let deployment handle the crash. 
- log.error({ err }, 'subscription crashed') - throw err + if (err.name !== 'AbortError') { + // allow this to cause an unhandled rejection, let deployment handle the crash. + log.error({ err }, 'subscription crashed') + throw err + } }) .finally(() => (this.running = undefined)) } @@ -74,6 +85,7 @@ export class RepoSubscription { this.repoQueue.add(details.repo, async () => { await this.handleMessage(item, details) }) + this.seenSeq = details.seq await this.repoQueue.main.onEmpty() // backpressure } } @@ -222,6 +234,7 @@ export class RepoSubscription { this.ac.abort() await this.running await this.repoQueue.destroy() + await this.background.processAll() } } diff --git a/packages/bsky/src/index.ts b/packages/bsky/src/index.ts index 2ed2c0763b0..b0967cfaee9 100644 --- a/packages/bsky/src/index.ts +++ b/packages/bsky/src/index.ts @@ -87,6 +87,7 @@ export class BskyAppView { views, signingKey, idResolver, + didCache, algos, }) diff --git a/packages/bsky/tests/__snapshots__/indexing.test.ts.snap b/packages/bsky/tests/__snapshots__/indexing.test.ts.snap index 88c02c6e3e0..881dbb59b50 100644 --- a/packages/bsky/tests/__snapshots__/indexing.test.ts.snap +++ b/packages/bsky/tests/__snapshots__/indexing.test.ts.snap @@ -108,24 +108,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -312,16 +295,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(6)", - "val": 
"test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/blob-resolver.test.ts b/packages/bsky/tests/blob-resolver.test.ts index 79491c5601e..8367fdb02ea 100644 --- a/packages/bsky/tests/blob-resolver.test.ts +++ b/packages/bsky/tests/blob-resolver.test.ts @@ -18,7 +18,6 @@ describe('blob resolver', () => { const sc = network.getSeedClient() await basicSeed(sc) await network.processAll() - await network.bsky.processAll() fileDid = sc.dids.carol fileCid = sc.posts[fileDid][0].images[0].image.ref client = axios.create({ diff --git a/packages/bsky/tests/db.test.ts b/packages/bsky/tests/db.test.ts index bb7562e9a92..4b1e5ad507e 100644 --- a/packages/bsky/tests/db.test.ts +++ b/packages/bsky/tests/db.test.ts @@ -1,10 +1,7 @@ -import { once } from 'events' import { sql } from 'kysely' import { wait } from '@atproto/common' import { TestNetwork } from '@atproto/dev-env' -import { Database } from '../src' -import { PrimaryDatabase } from '../src/db' -import { Leader } from '../src/db/leader' +import { Database, PrimaryDatabase } from '../src' describe('db', () => { let network: TestNetwork @@ -14,7 +11,7 @@ describe('db', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_db', }) - db = network.bsky.ctx.db.getPrimary() + db = network.bsky.db.getPrimary() }) afterAll(async () => { @@ -189,80 +186,4 @@ describe('db', () => { expect(res.length).toBe(0) }) }) - - describe('Leader', () => { - it('allows leaders to run sequentially.', async () => { - const task = async () => { - await wait(25) - return 'complete' - } - const leader1 = new Leader(707, db) - const leader2 = new Leader(707, db) - const leader3 = new Leader(707, db) - const result1 = await leader1.run(task) - await wait(5) // Short grace period for pg to close session - const result2 = await leader2.run(task) - await wait(5) - const result3 = await leader3.run(task) - await wait(5) - const result4 = await 
leader3.run(task) - expect([result1, result2, result3, result4]).toEqual([ - { ran: true, result: 'complete' }, - { ran: true, result: 'complete' }, - { ran: true, result: 'complete' }, - { ran: true, result: 'complete' }, - ]) - }) - - it('only allows one leader at a time.', async () => { - const task = async () => { - await wait(75) - return 'complete' - } - const results = await Promise.all([ - new Leader(717, db).run(task), - new Leader(717, db).run(task), - new Leader(717, db).run(task), - ]) - const byRan = (a, b) => Number(a.ran) - Number(b.ran) - expect(results.sort(byRan)).toEqual([ - { ran: false }, - { ran: false }, - { ran: true, result: 'complete' }, - ]) - }) - - it('leaders with different ids do not conflict.', async () => { - const task = async () => { - await wait(75) - return 'complete' - } - const results = await Promise.all([ - new Leader(727, db).run(task), - new Leader(728, db).run(task), - new Leader(729, db).run(task), - ]) - expect(results).toEqual([ - { ran: true, result: 'complete' }, - { ran: true, result: 'complete' }, - { ran: true, result: 'complete' }, - ]) - }) - - it('supports abort.', async () => { - const task = async (ctx: { signal: AbortSignal }) => { - wait(10).then(abort) - return await Promise.race([ - wait(50), - once(ctx.signal, 'abort').then(() => ctx.signal.reason), - ]) - } - const leader = new Leader(737, db) - const abort = () => { - leader.session?.abortController.abort(new Error('Oops!')) - } - const result = await leader.run(task) - expect(result).toEqual({ ran: true, result: new Error('Oops!') }) - }) - }) }) diff --git a/packages/bsky/tests/did-cache.test.ts b/packages/bsky/tests/did-cache.test.ts index d0b94147bc6..d140b529e9e 100644 --- a/packages/bsky/tests/did-cache.test.ts +++ b/packages/bsky/tests/did-cache.test.ts @@ -1,8 +1,8 @@ +import { wait } from '@atproto/common' +import { IdResolver } from '@atproto/identity' import { TestNetwork, SeedClient } from '@atproto/dev-env' import userSeed from 
'./seeds/users' -import { IdResolver } from '@atproto/identity' -import DidSqlCache from '../src/did-cache' -import { wait } from '@atproto/common' +import { DidSqlCache } from '../src' describe('did cache', () => { let network: TestNetwork @@ -19,8 +19,8 @@ describe('did cache', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_did_cache', }) - idResolver = network.bsky.indexer.ctx.idResolver - didCache = network.bsky.indexer.ctx.didCache + idResolver = network.bsky.ctx.idResolver + didCache = network.bsky.ctx.didCache as DidSqlCache sc = network.getSeedClient() await userSeed(sc) await network.processAll() @@ -81,7 +81,7 @@ describe('did cache', () => { }) it('accurately reports expired dids & refreshes the cache', async () => { - const didCache = new DidSqlCache(network.bsky.ctx.db.getPrimary(), 1, 60000) + const didCache = new DidSqlCache(network.bsky.db.getPrimary(), 1, 60000) const shortCacheResolver = new IdResolver({ plcUrl: network.bsky.ctx.cfg.didPlcUrl, didCache, @@ -110,7 +110,7 @@ describe('did cache', () => { }) it('does not return expired dids & refreshes the cache', async () => { - const didCache = new DidSqlCache(network.bsky.ctx.db.getPrimary(), 0, 1) + const didCache = new DidSqlCache(network.bsky.db.getPrimary(), 0, 1) const shortExpireResolver = new IdResolver({ plcUrl: network.bsky.ctx.cfg.didPlcUrl, didCache, diff --git a/packages/bsky/tests/duplicate-records.test.ts b/packages/bsky/tests/duplicate-records.test.ts index 9c7617bd668..bd6ed269227 100644 --- a/packages/bsky/tests/duplicate-records.test.ts +++ b/packages/bsky/tests/duplicate-records.test.ts @@ -2,23 +2,19 @@ import { AtUri } from '@atproto/syntax' import { cidForCbor, TID } from '@atproto/common' import { WriteOpAction } from '@atproto/repo' import { TestNetwork } from '@atproto/dev-env' -import { Database } from '../src' -import { PrimaryDatabase } from '../src/db' import * as lex from '../src/lexicon/lexicons' -import { Services } from 
'../src/indexer/services' +import { Database, PrimaryDatabase } from '../src' describe('duplicate record', () => { let network: TestNetwork let did: string let db: PrimaryDatabase - let services: Services beforeAll(async () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_duplicates', }) - db = network.bsky.indexer.ctx.db - services = network.bsky.indexer.ctx.services + db = network.bsky.db.getPrimary() did = 'did:example:alice' }) @@ -51,21 +47,25 @@ describe('duplicate record', () => { } const uri = AtUri.make(did, coll, TID.nextStr()) const cid = await cidForCbor(repost) - await services - .indexing(db) - .indexRecord(uri, cid, repost, WriteOpAction.Create, repost.createdAt) + await network.bsky.sub.indexingSvc.indexRecord( + uri, + cid, + repost, + WriteOpAction.Create, + repost.createdAt, + ) uris.push(uri) } let count = await countRecords(db, 'repost') expect(count).toBe(1) - await services.indexing(db).deleteRecord(uris[0], false) + await network.bsky.sub.indexingSvc.deleteRecord(uris[0], false) count = await countRecords(db, 'repost') expect(count).toBe(1) - await services.indexing(db).deleteRecord(uris[1], true) + await network.bsky.sub.indexingSvc.deleteRecord(uris[1], true) count = await countRecords(db, 'repost') expect(count).toBe(0) @@ -87,16 +87,20 @@ describe('duplicate record', () => { } const uri = AtUri.make(did, coll, TID.nextStr()) const cid = await cidForCbor(like) - await services - .indexing(db) - .indexRecord(uri, cid, like, WriteOpAction.Create, like.createdAt) + await network.bsky.sub.indexingSvc.indexRecord( + uri, + cid, + like, + WriteOpAction.Create, + like.createdAt, + ) uris.push(uri) } let count = await countRecords(db, 'like') expect(count).toBe(1) - await services.indexing(db).deleteRecord(uris[0], false) + await network.bsky.sub.indexingSvc.deleteRecord(uris[0], false) count = await countRecords(db, 'like') expect(count).toBe(1) @@ -107,7 +111,7 @@ describe('duplicate record', () => { .executeTakeFirst() 
expect(got?.uri).toEqual(uris[1].toString()) - await services.indexing(db).deleteRecord(uris[1], true) + await network.bsky.sub.indexingSvc.deleteRecord(uris[1], true) count = await countRecords(db, 'like') expect(count).toBe(0) @@ -124,21 +128,25 @@ describe('duplicate record', () => { } const uri = AtUri.make(did, coll, TID.nextStr()) const cid = await cidForCbor(follow) - await services - .indexing(db) - .indexRecord(uri, cid, follow, WriteOpAction.Create, follow.createdAt) + await network.bsky.sub.indexingSvc.indexRecord( + uri, + cid, + follow, + WriteOpAction.Create, + follow.createdAt, + ) uris.push(uri) } let count = await countRecords(db, 'follow') expect(count).toBe(1) - await services.indexing(db).deleteRecord(uris[0], false) + await network.bsky.sub.indexingSvc.deleteRecord(uris[0], false) count = await countRecords(db, 'follow') expect(count).toBe(1) - await services.indexing(db).deleteRecord(uris[1], true) + await network.bsky.sub.indexingSvc.deleteRecord(uris[1], true) count = await countRecords(db, 'follow') expect(count).toBe(0) diff --git a/packages/bsky/tests/feed-generation.test.ts b/packages/bsky/tests/feed-generation.test.ts index aceecec3204..500b6e68379 100644 --- a/packages/bsky/tests/feed-generation.test.ts +++ b/packages/bsky/tests/feed-generation.test.ts @@ -1,3 +1,6 @@ +import assert from 'assert' +import { XRPCError } from '@atproto/xrpc' +import { AuthRequiredError } from '@atproto/xrpc-server' import { TID } from '@atproto/common' import { AtUri, AtpAgent } from '@atproto/api' import { @@ -16,9 +19,6 @@ import { } from '../src/lexicon/types/app/bsky/feed/defs' import basicSeed from './seeds/basic' import { forSnapshot, paginateAll } from './_util' -import { AuthRequiredError } from '@atproto/xrpc-server' -import assert from 'assert' -import { XRPCError } from '@atproto/xrpc' describe('feed generation', () => { let network: TestNetwork @@ -74,7 +74,7 @@ describe('feed generation', () => { { uri: feedUriBadPagination.toString(), order: 
3 }, { uri: primeUri.toString(), order: 4 }, ] - await network.bsky.ctx.db + await network.bsky.db .getPrimary() .db.insertInto('suggested_feed') .values(feedSuggestions) @@ -157,22 +157,11 @@ describe('feed generation', () => { sc.getHeaders(alice), ) await network.processAll() - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: prime.uri, - cid: prime.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: prime.uri, + takenDown: true, + }) + feedUriAll = all.uri feedUriAllRef = new RecordRef(all.uri, all.cid) feedUriEven = even.uri @@ -324,7 +313,6 @@ describe('feed generation', () => { sc.getHeaders(sc.dids.bob), ) await network.processAll() - await network.bsky.processAll() // now take it offline await bobFg.close() @@ -362,7 +350,8 @@ describe('feed generation', () => { }) }) - describe('getPopularFeedGenerators', () => { + // @TODO support from dataplane + describe.skip('getPopularFeedGenerators', () => { it('gets popular feed generators', async () => { const resEven = await agent.api.app.bsky.unspecced.getPopularFeedGenerators( diff --git a/packages/bsky/tests/handle-invalidation.test.ts b/packages/bsky/tests/handle-invalidation.test.ts index cee9cfb61df..3919baf899d 100644 --- a/packages/bsky/tests/handle-invalidation.test.ts +++ b/packages/bsky/tests/handle-invalidation.test.ts @@ -26,10 +26,8 @@ describe('handle invalidation', () => { alice = sc.dids.alice bob = sc.dids.bob - const origResolve = network.bsky.indexer.ctx.idResolver.handle.resolve - network.bsky.indexer.ctx.idResolver.handle.resolve = async ( - handle: string, - ) => { + const origResolve = network.bsky.ctx.idResolver.handle.resolve + network.bsky.ctx.idResolver.handle.resolve = async (handle: string) 
=> { if (mockHandles[handle] === null) { return undefined } else if (mockHandles[handle]) { @@ -45,7 +43,7 @@ describe('handle invalidation', () => { const backdateIndexedAt = async (did: string) => { const TWO_DAYS_AGO = new Date(Date.now() - 2 * DAY).toISOString() - await network.bsky.ctx.db + await network.bsky.db .getPrimary() .db.updateTable('actor') .set({ indexedAt: TWO_DAYS_AGO }) diff --git a/packages/bsky/tests/image/server.test.ts b/packages/bsky/tests/image/server.test.ts index 3bce638ce45..528f538d44f 100644 --- a/packages/bsky/tests/image/server.test.ts +++ b/packages/bsky/tests/image/server.test.ts @@ -19,7 +19,6 @@ describe('image processing server', () => { const sc = network.getSeedClient() await basicSeed(sc) await network.processAll() - await network.bsky.processAll() fileDid = sc.dids.carol fileCid = sc.posts[fileDid][0].images[0].image.ref client = axios.create({ diff --git a/packages/bsky/tests/indexing.test.ts b/packages/bsky/tests/indexing.test.ts index f874a084567..6c86beba966 100644 --- a/packages/bsky/tests/indexing.test.ts +++ b/packages/bsky/tests/indexing.test.ts @@ -16,13 +16,14 @@ import { forSnapshot } from './_util' import usersSeed from './seeds/users' import basicSeed from './seeds/basic' import { ids } from '../src/lexicon/lexicons' -import { Database } from '../src/db' +import { Database } from '../src/data-plane/server/db' describe('indexing', () => { let network: TestNetwork let agent: AtpAgent let pdsAgent: AtpAgent let sc: SeedClient + let db: Database beforeAll(async () => { network = await TestNetwork.create({ @@ -31,12 +32,11 @@ describe('indexing', () => { agent = network.bsky.getClient() pdsAgent = network.pds.getClient() sc = network.getSeedClient() + db = network.bsky.db.getPrimary() await usersSeed(sc) // Data in tests is not processed from subscription await network.processAll() - await network.bsky.ingester.sub.destroy() - await network.bsky.indexer.sub.destroy() - await network.bsky.processAll() + await 
network.bsky.sub.destroy() }) afterAll(async () => { @@ -44,7 +44,6 @@ describe('indexing', () => { }) it('indexes posts.', async () => { - const { db, services } = network.bsky.indexer.ctx const createdAt = new Date().toISOString() const createRecord = await prepareCreate({ did: sc.dids.alice, @@ -95,7 +94,7 @@ describe('indexing', () => { }) // Create - await services.indexing(db).indexRecord(...createRecord) + await network.bsky.sub.indexingSvc.indexRecord(...createRecord) const getAfterCreate = await agent.api.app.bsky.feed.getPostThread( { uri: uri.toString() }, @@ -105,7 +104,7 @@ describe('indexing', () => { const createNotifications = await getNotifications(db, uri) // Update - await services.indexing(db).indexRecord(...updateRecord) + await network.bsky.sub.indexingSvc.indexRecord(...updateRecord) const getAfterUpdate = await agent.api.app.bsky.feed.getPostThread( { uri: uri.toString() }, @@ -115,7 +114,7 @@ describe('indexing', () => { const updateNotifications = await getNotifications(db, uri) // Delete - await services.indexing(db).deleteRecord(...deleteRecord) + await network.bsky.sub.indexingSvc.deleteRecord(...deleteRecord) const getAfterDelete = agent.api.app.bsky.feed.getPostThread( { uri: uri.toString() }, @@ -134,7 +133,6 @@ describe('indexing', () => { }) it('indexes profiles.', async () => { - const { db, services } = network.bsky.indexer.ctx const createRecord = await prepareCreate({ did: sc.dids.dan, collection: ids.AppBskyActorProfile, @@ -161,7 +159,7 @@ describe('indexing', () => { }) // Create - await services.indexing(db).indexRecord(...createRecord) + await network.bsky.sub.indexingSvc.indexRecord(...createRecord) const getAfterCreate = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.dan }, @@ -170,7 +168,7 @@ describe('indexing', () => { expect(forSnapshot(getAfterCreate.data)).toMatchSnapshot() // Update - await services.indexing(db).indexRecord(...updateRecord) + await 
network.bsky.sub.indexingSvc.indexRecord(...updateRecord) const getAfterUpdate = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.dan }, @@ -179,7 +177,7 @@ describe('indexing', () => { expect(forSnapshot(getAfterUpdate.data)).toMatchSnapshot() // Delete - await services.indexing(db).deleteRecord(...deleteRecord) + await network.bsky.sub.indexingSvc.deleteRecord(...deleteRecord) const getAfterDelete = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.dan }, @@ -189,7 +187,6 @@ describe('indexing', () => { }) it('handles post aggregations out of order.', async () => { - const { db, services } = network.bsky.indexer.ctx const createdAt = new Date().toISOString() const originalPost = await prepareCreate({ did: sc.dids.alice, @@ -236,11 +233,11 @@ describe('indexing', () => { } as AppBskyFeedRepost.Record, }) // reply, like, and repost indexed orior to the original post - await services.indexing(db).indexRecord(...reply) - await services.indexing(db).indexRecord(...like) - await services.indexing(db).indexRecord(...repost) - await services.indexing(db).indexRecord(...originalPost) - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRecord(...reply) + await network.bsky.sub.indexingSvc.indexRecord(...like) + await network.bsky.sub.indexingSvc.indexRecord(...repost) + await network.bsky.sub.indexingSvc.indexRecord(...originalPost) + await network.bsky.sub.background.processAll() const agg = await db.db .selectFrom('post_agg') .selectAll() @@ -260,14 +257,13 @@ describe('indexing', () => { rkey: uri.rkey, }) } - await services.indexing(db).deleteRecord(...del(reply[0])) - await services.indexing(db).deleteRecord(...del(like[0])) - await services.indexing(db).deleteRecord(...del(repost[0])) - await services.indexing(db).deleteRecord(...del(originalPost[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(reply[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(like[0])) + await 
network.bsky.sub.indexingSvc.deleteRecord(...del(repost[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(originalPost[0])) }) it('does not notify user of own like or repost', async () => { - const { db, services } = network.bsky.indexer.ctx const createdAt = new Date().toISOString() const originalPost = await prepareCreate({ @@ -325,13 +321,12 @@ describe('indexing', () => { } as AppBskyFeedRepost.Record, }) - await services.indexing(db).indexRecord(...originalPost) - await services.indexing(db).indexRecord(...ownLike) - await services.indexing(db).indexRecord(...ownRepost) - await services.indexing(db).indexRecord(...aliceLike) - await services.indexing(db).indexRecord(...aliceRepost) - - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRecord(...originalPost) + await network.bsky.sub.indexingSvc.indexRecord(...ownLike) + await network.bsky.sub.indexingSvc.indexRecord(...ownRepost) + await network.bsky.sub.indexingSvc.indexRecord(...aliceLike) + await network.bsky.sub.indexingSvc.indexRecord(...aliceRepost) + await network.bsky.sub.background.processAll() const { data: { notifications }, @@ -356,15 +351,14 @@ describe('indexing', () => { }) } - await services.indexing(db).deleteRecord(...del(ownLike[0])) - await services.indexing(db).deleteRecord(...del(ownRepost[0])) - await services.indexing(db).deleteRecord(...del(aliceLike[0])) - await services.indexing(db).deleteRecord(...del(aliceRepost[0])) - await services.indexing(db).deleteRecord(...del(originalPost[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(ownLike[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(ownRepost[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(aliceLike[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(aliceRepost[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(originalPost[0])) }) it('handles profile aggregations out of order.', async () => { - const { db, services } = 
network.bsky.indexer.ctx const createdAt = new Date().toISOString() const unknownDid = 'did:example:unknown' const follow = await prepareCreate({ @@ -376,8 +370,8 @@ describe('indexing', () => { createdAt, } as AppBskyGraphFollow.Record, }) - await services.indexing(db).indexRecord(...follow) - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRecord(...follow) + await network.bsky.sub.background.processAll() const agg = await db.db .selectFrom('profile_agg') .select(['did', 'followersCount']) @@ -395,22 +389,19 @@ describe('indexing', () => { rkey: uri.rkey, }) } - await services.indexing(db).deleteRecord(...del(follow[0])) + await network.bsky.sub.indexingSvc.deleteRecord(...del(follow[0])) }) describe('indexRepo', () => { beforeAll(async () => { - network.bsky.indexer.sub.resume() - network.bsky.ingester.sub.resume() + network.bsky.sub.run() await basicSeed(sc, false) await network.processAll() - await network.bsky.ingester.sub.destroy() - await network.bsky.indexer.sub.destroy() - await network.bsky.processAll() + await network.bsky.sub.destroy() + await network.bsky.sub.background.processAll() }) it('preserves indexes when no record changes.', async () => { - const { db, services } = network.bsky.indexer.ctx // Mark originals const { data: origProfile } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, @@ -429,8 +420,8 @@ describe('indexing', () => { await pdsAgent.api.com.atproto.sync.getLatestCommit({ did: sc.dids.alice, }) - await services.indexing(db).indexRepo(sc.dids.alice, commit.cid) - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRepo(sc.dids.alice, commit.cid) + await network.bsky.sub.background.processAll() // Check const { data: profile } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, @@ -450,7 +441,6 @@ describe('indexing', () => { }) it('updates indexes when records change.', async () => { - const { db, services } = network.bsky.indexer.ctx // Update 
profile await pdsAgent.api.com.atproto.repo.putRecord( { @@ -474,8 +464,8 @@ describe('indexing', () => { await pdsAgent.api.com.atproto.sync.getLatestCommit({ did: sc.dids.alice, }) - await services.indexing(db).indexRepo(sc.dids.alice, commit.cid) - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRepo(sc.dids.alice, commit.cid) + await network.bsky.sub.background.processAll() // Check const { data: profile } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, @@ -497,7 +487,6 @@ describe('indexing', () => { }) it('skips invalid records.', async () => { - const { db, services } = network.bsky.indexer.ctx const { accountManager } = network.pds.ctx // const { db: pdsDb, services: pdsServices } = network.pds.ctx // Create a good and a bad post record @@ -533,7 +522,7 @@ describe('indexing', () => { await pdsAgent.api.com.atproto.sync.getLatestCommit({ did: sc.dids.alice, }) - await services.indexing(db).indexRepo(sc.dids.alice, commit.cid) + await network.bsky.sub.indexingSvc.indexRepo(sc.dids.alice, commit.cid) // Check const getGoodPost = agent.api.app.bsky.feed.getPostThread( { uri: writes[0].uri.toString(), depth: 0 }, @@ -558,7 +547,6 @@ describe('indexing', () => { } it('indexes handle for a fresh did', async () => { - const { db, services } = network.bsky.indexer.ctx const now = new Date().toISOString() const sessionAgent = new AtpAgent({ service: network.pds.url }) const { @@ -569,12 +557,11 @@ describe('indexing', () => { password: 'password', }) await expect(getIndexedHandle(did)).rejects.toThrow('Profile not found') - await services.indexing(db).indexHandle(did, now) + await network.bsky.sub.indexingSvc.indexHandle(did, now) await expect(getIndexedHandle(did)).resolves.toEqual('did1.test') }) it('reindexes handle for existing did when forced', async () => { - const { db, services } = network.bsky.indexer.ctx const now = new Date().toISOString() const sessionAgent = new AtpAgent({ service: network.pds.url }) 
const { @@ -584,19 +571,18 @@ describe('indexing', () => { handle: 'did2.test', password: 'password', }) - await services.indexing(db).indexHandle(did, now) + await network.bsky.sub.indexingSvc.indexHandle(did, now) await expect(getIndexedHandle(did)).resolves.toEqual('did2.test') await sessionAgent.com.atproto.identity.updateHandle({ handle: 'did2-updated.test', }) - await services.indexing(db).indexHandle(did, now) + await network.bsky.sub.indexingSvc.indexHandle(did, now) await expect(getIndexedHandle(did)).resolves.toEqual('did2.test') // Didn't update, not forced - await services.indexing(db).indexHandle(did, now, true) + await network.bsky.sub.indexingSvc.indexHandle(did, now, true) await expect(getIndexedHandle(did)).resolves.toEqual('did2-updated.test') }) it('handles profile aggregations out of order', async () => { - const { db, services } = network.bsky.indexer.ctx const now = new Date().toISOString() const sessionAgent = new AtpAgent({ service: network.pds.url }) const { @@ -615,9 +601,9 @@ describe('indexing', () => { createdAt: now, } as AppBskyGraphFollow.Record, }) - await services.indexing(db).indexRecord(...follow) - await services.indexing(db).indexHandle(did, now) - await network.bsky.processAll() + await network.bsky.sub.indexingSvc.indexRecord(...follow) + await network.bsky.sub.indexingSvc.indexHandle(did, now) + await network.bsky.sub.background.processAll() const agg = await db.db .selectFrom('profile_agg') .select(['did', 'followersCount']) @@ -632,13 +618,12 @@ describe('indexing', () => { describe('tombstoneActor', () => { it('does not unindex actor when they are still being hosted by their pds', async () => { - const { db, services } = network.bsky.indexer.ctx const { data: profileBefore } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, { headers: await network.serviceHeaders(sc.dids.bob) }, ) // Attempt indexing tombstone - await services.indexing(db).tombstoneActor(sc.dids.alice) + await 
network.bsky.sub.indexingSvc.tombstoneActor(sc.dids.alice) const { data: profileAfter } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, { headers: await network.serviceHeaders(sc.dids.bob) }, @@ -647,7 +632,6 @@ describe('indexing', () => { }) it('unindexes actor when they are no longer hosted by their pds', async () => { - const { db, services } = network.bsky.indexer.ctx const { alice } = sc.dids const getProfileBefore = agent.api.app.bsky.actor.getProfile( { actor: alice }, @@ -666,7 +650,7 @@ describe('indexing', () => { }) await network.pds.ctx.backgroundQueue.processAll() // Index tombstone - await services.indexing(db).tombstoneActor(alice) + await network.bsky.sub.indexingSvc.tombstoneActor(alice) const getProfileAfter = agent.api.app.bsky.actor.getProfile( { actor: alice }, { headers: await network.serviceHeaders(sc.dids.bob) }, diff --git a/packages/bsky/tests/server.test.ts b/packages/bsky/tests/server.test.ts index 3f54b2a37bd..3084d8f2727 100644 --- a/packages/bsky/tests/server.test.ts +++ b/packages/bsky/tests/server.test.ts @@ -5,6 +5,7 @@ import { TestNetwork } from '@atproto/dev-env' import { handler as errorHandler } from '../src/error' import { Database } from '../src' import basicSeed from './seeds/basic' +import { once } from 'events' describe('server', () => { let network: TestNetwork @@ -19,7 +20,7 @@ describe('server', () => { await basicSeed(sc) await network.processAll() alice = sc.dids.alice - db = network.bsky.ctx.db.getPrimary() + db = network.bsky.db.getPrimary() }) afterAll(async () => { @@ -57,7 +58,7 @@ describe('server', () => { it('healthcheck succeeds when database is available.', async () => { const { data, status } = await axios.get(`${network.bsky.url}/xrpc/_health`) expect(status).toEqual(200) - expect(data).toEqual({ version: '0.0.0' }) + expect(data).toEqual({ version: 'unknown' }) }) // TODO(bsky) check on a different endpoint that accepts json, currently none. 
@@ -108,10 +109,9 @@ describe('server', () => { expect(res.headers['content-encoding']).toBeUndefined() }) - it('healthcheck fails when database is unavailable.', async () => { - await network.bsky.ingester.sub.destroy() - await network.bsky.indexer.sub.destroy() - await db.close() + it('healthcheck fails when dataplane is unavailable.', async () => { + const { port } = network.bsky.dataplane.server.address() as AddressInfo + await network.bsky.dataplane.destroy() let error: AxiosError try { await axios.get(`${network.bsky.url}/xrpc/_health`) @@ -122,10 +122,14 @@ describe('server', () => { } else { throw err } + } finally { + // restart dataplane server to allow test suite to cleanup + network.bsky.dataplane.server.listen(port) + await once(network.bsky.dataplane.server, 'listening') } expect(error.response?.status).toEqual(503) expect(error.response?.data).toEqual({ - version: '0.0.0', + version: 'unknown', error: 'Service Unavailable', }) }) diff --git a/packages/bsky/tests/subscription/repo.test.ts b/packages/bsky/tests/subscription/repo.test.ts index 9d40af78c00..5b03afb4b2c 100644 --- a/packages/bsky/tests/subscription/repo.test.ts +++ b/packages/bsky/tests/subscription/repo.test.ts @@ -7,12 +7,11 @@ import { cborDecode, cborEncode } from '@atproto/common' import { DatabaseSchemaType } from '../../src/data-plane/server/db/database-schema' import { ids } from '../../src/lexicon/lexicons' import { forSnapshot } from '../_util' -import { AppContext, Database } from '../../src' +import { Database } from '../../src' import basicSeed from '../seeds/basic' describe('sync', () => { let network: TestNetwork - let ctx: AppContext let pdsAgent: AtpAgent let sc: SeedClient @@ -20,7 +19,6 @@ describe('sync', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_subscription_repo', }) - ctx = network.bsky.ctx pdsAgent = network.pds.getClient() sc = network.getSeedClient() await basicSeed(sc) @@ -31,7 +29,7 @@ describe('sync', () => { }) it('indexes permit 
history being replayed.', async () => { - const db = ctx.db.getPrimary() + const db = network.bsky.db.getPrimary() // Generate some modifications and dupes const { alice, bob, carol, dan } = sc.dids @@ -63,16 +61,12 @@ describe('sync', () => { const originalTableDump = await getTableDump() // Reprocess repos via sync subscription, on top of existing indices - await network.bsky.ingester.sub.destroy() - await network.bsky.indexer.sub.destroy() - // Hard reset of state in redis - await network.bsky.ingester.sub.resetCursor() - const indexerSub = network.bsky.indexer.sub - const partition = indexerSub.partitions.get(0) - await network.bsky.indexer.ctx.redis.del(partition.key) + await network.bsky.sub.destroy() + // Hard reset of state + network.bsky.sub.cursor = 0 + network.bsky.sub.seenSeq = null // Boot streams back up - network.bsky.indexer.sub.resume() - network.bsky.ingester.sub.resume() + network.bsky.sub.run() await network.processAll() // Permissive of indexedAt times changing @@ -103,7 +97,9 @@ describe('sync', () => { }) await network.processAll() // confirm jack was indexed as an actor despite the bad event - const actors = await dumpTable(ctx.db.getPrimary(), 'actor', ['did']) + const actors = await dumpTable(network.bsky.db.getPrimary(), 'actor', [ + 'did', + ]) expect(actors.map((a) => a.handle)).toContain('jack.test') network.pds.ctx.sequencer.sequenceCommit = sequenceCommitOrig }) diff --git a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap index f7a61d82547..3f775a98661 100644 --- a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap @@ -84,24 +84,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - 
}, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -327,16 +310,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(6)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -493,24 +467,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label", - }, - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1231,24 +1188,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1669,24 +1609,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", 
- "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1915,16 +1838,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(8)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap b/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap index e5e068353ea..93a646c8a16 100644 --- a/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap @@ -98,16 +98,7 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap index 079cdbe60a5..f47f3a10910 100644 --- a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap @@ -98,16 +98,7 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -296,24 +287,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - 
"cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(6)", - "val": "test-label", - }, - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(6)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap index d6712c89c56..56fd2bf4899 100644 --- a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap @@ -85,24 +85,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -216,24 +199,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -511,16 +477,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": 
"record(8)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap index a7b34c07dc0..31eb91a9d29 100644 --- a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap @@ -237,24 +237,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(5)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(9)", - "val": "test-label", - }, - Object { - "cid": "cids(5)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(9)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap index 0e1c14c2696..58849599bfe 100644 --- a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap @@ -212,24 +212,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap index bce3d4e5139..2f18110bc14 100644 --- 
a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap @@ -245,24 +245,7 @@ Array [ "cid": "cids(12)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [ - Object { - "cid": "cids(12)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(14)", - "val": "test-label", - }, - Object { - "cid": "cids(12)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(14)", - "val": "test-label-2", - }, - ], + "labels": Array [], "reason": "reply", "reasonSubject": "record(4)", "record": Object { @@ -689,24 +672,7 @@ Array [ "cid": "cids(15)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [ - Object { - "cid": "cids(15)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(17)", - "val": "test-label", - }, - Object { - "cid": "cids(15)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(17)", - "val": "test-label-2", - }, - ], + "labels": Array [], "reason": "reply", "reasonSubject": "record(4)", "record": Object { @@ -916,16 +882,7 @@ Array [ "cid": "cids(5)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [ - Object { - "cid": "cids(5)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label", - }, - ], + "labels": Array [], "reason": "quote", "reasonSubject": "record(1)", "record": Object { diff --git a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap index 85c3493e092..cb18eeafbe4 100644 --- a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap @@ -79,24 +79,7 @@ Object { ], }, "indexedAt": 
"1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -312,24 +295,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(7)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(7)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -550,24 +516,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(7)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(7)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1102,24 +1051,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, 
"record": Object { "$type": "app.bsky.feed.post", @@ -1302,24 +1234,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - Object { - "cid": "cids(3)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap index eb8934f00bb..e0c88a23363 100644 --- a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap @@ -273,16 +273,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(5)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(5)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -848,16 +839,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(8)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -1319,24 +1301,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": 
"record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1755,24 +1720,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2011,24 +1959,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2308,16 +2239,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(11)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(13)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -3134,24 +3056,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label", - }, - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label-2", - }, - ], + "labels": 
Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -3399,24 +3304,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label", - }, - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -3705,16 +3593,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(11)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(13)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -4327,24 +4206,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label", - }, - Object { - "cid": "cids(9)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(10)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -4716,16 +4578,7 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(11)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(13)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -5091,24 +4944,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": 
"1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -5284,24 +5120,7 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(4)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/actor-search.test.ts b/packages/bsky/tests/views/actor-search.test.ts index 70f8862f7d7..0a51556c591 100644 --- a/packages/bsky/tests/views/actor-search.test.ts +++ b/packages/bsky/tests/views/actor-search.test.ts @@ -20,11 +20,11 @@ describe.skip('pds actor search views', () => { sc = network.getSeedClient() await wait(50) // allow pending sub to be established - await network.bsky.ingester.sub.destroy() + await network.bsky.sub.destroy() await usersBulkSeed(sc) // Skip did/handle resolution for expediency - const db = network.bsky.ctx.db.getPrimary() + const db = network.bsky.db.getPrimary() const now = new Date().toISOString() await db.db .insertInto('actor') @@ -39,9 +39,8 @@ describe.skip('pds actor search views', () => { .execute() // Process remaining profiles - network.bsky.ingester.sub.resume() + network.bsky.sub.run() await network.processAll(50000) - await network.bsky.processAll() headers = await network.serviceHeaders(Object.values(sc.dids)[0]) }) @@ -239,21 +238,10 @@ describe.skip('pds actor search views', () => { }) 
it('search blocks by actor takedown', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids['cara-wiegand69.test'], - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.server.ctx.dataplane.updateTakedown({ + actorDid: sc.dids['cara-wiegand69.test'], + takenDown: true, + }) const result = await agent.api.app.bsky.actor.searchActorsTypeahead( { term: 'car' }, { headers }, diff --git a/packages/bsky/tests/views/admin/repo-search.test.ts b/packages/bsky/tests/views/admin/repo-search.test.ts deleted file mode 100644 index 6d9e8468dc1..00000000000 --- a/packages/bsky/tests/views/admin/repo-search.test.ts +++ /dev/null @@ -1,133 +0,0 @@ -import AtpAgent, { ComAtprotoAdminSearchRepos } from '@atproto/api' -import { wait } from '@atproto/common' -import { TestNetwork, SeedClient } from '@atproto/dev-env' -import usersBulkSeed from '../../seeds/users-bulk' - -describe('pds admin repo search views', () => { - let network: TestNetwork - let agent: AtpAgent - let sc: SeedClient - let headers: { [s: string]: string } - // In results that don't have a related profile record, we will only have handle but not a name - // And names are usually capitalized on each word so the comparison is done on lowercase version - const handleOrNameStartsWith = - (term: string) => (handleOrName: (string | undefined)[]) => - !!handleOrName.find((str) => - str?.toLowerCase().includes(term.toLowerCase()), - ) - const resultToHandlesAndNames = ( - result: ComAtprotoAdminSearchRepos.Response, - ) => - result.data.repos.map((u: any) => [ - u.handle, - (u.relatedRecords[0] as Record)?.displayName, - ]) - - beforeAll(async () => { - network = await TestNetwork.create({ - dbPostgresSchema: 'bsky_views_repo_search', - }) - agent = 
network.bsky.getClient() - sc = network.getSeedClient() - - await wait(100) // allow pending sub to be established - await network.bsky.ingester.sub.destroy() - await usersBulkSeed(sc) - - // Skip did/handle resolution for expediency - const db = network.bsky.ctx.db.getPrimary() - const now = new Date().toISOString() - await db.db - .insertInto('actor') - .values( - Object.entries(sc.dids).map(([handle, did]) => ({ - did, - handle, - indexedAt: now, - })), - ) - .onConflict((oc) => oc.doNothing()) - .execute() - - // Process remaining profiles - network.bsky.ingester.sub.resume() - await network.processAll(50000) - headers = await network.adminHeaders({}) - }) - - afterAll(async () => { - await network.close() - }) - - it('gives relevant results when searched by handle', async () => { - const term = 'car' - const result = await agent.api.com.atproto.admin.searchRepos( - { term }, - { headers }, - ) - - const shouldContain = [ - // Present despite repo takedown - // First item in the array because of direct handle match - 'cara-wiegand69.test', - 'carlos6.test', - 'aliya-hodkiewicz.test', // Carlton Abernathy IV - 'eudora-dietrich4.test', // Carol Littel - 'carolina-mcdermott77.test', - 'shane-torphy52.test', // Sadie Carter - // Last item in the array because handle and display name none match very close to the the search term - 'cayla-marquardt39.test', - ] - - const handlesAndNames = resultToHandlesAndNames(result) - const handles = handlesAndNames.map(([handle]) => handle) - // Assert that all matches are found - shouldContain.forEach((handle) => expect(handles).toContain(handle)) - // Assert that the order is correct, showing the closest match by handle first - const containsTerm = handleOrNameStartsWith(term) - expect(containsTerm(handlesAndNames[0])).toBeTruthy() - expect( - containsTerm(handlesAndNames[handlesAndNames.length - 1]), - ).toBeFalsy() - }) - - it('pagination respects matching order when searched by handle', async () => { - const term = 'car' - 
const resultPageOne = await agent.api.com.atproto.admin.searchRepos( - { term, limit: 4 }, - { headers }, - ) - const resultPageTwo = await agent.api.com.atproto.admin.searchRepos( - { term, limit: 4, cursor: resultPageOne.data.cursor }, - { headers }, - ) - - const handlesAndNamesPageOne = resultToHandlesAndNames(resultPageOne) - const handlesAndNamesPageTwo = resultToHandlesAndNames(resultPageTwo) - const containsTerm = handleOrNameStartsWith(term) - - // First result of first page always has matches either handle or did - expect(containsTerm(handlesAndNamesPageOne[0])).toBeTruthy() - // Since we only get 4 items per page max and know that among the test dataset - // at least 4 users have the term in handle or profile, last item in first page - // should contain the term - expect( - containsTerm(handlesAndNamesPageOne[handlesAndNamesPageOne.length - 1]), - ).toBeTruthy() - // However, the last item of second page, should not contain the term - expect( - containsTerm(handlesAndNamesPageTwo[handlesAndNamesPageTwo.length - 1]), - ).toBeFalsy() - }) - - it('gives relevant results when searched by did', async () => { - const term = sc.dids['cara-wiegand69.test'] - const res = await agent.api.com.atproto.admin.searchRepos( - { term }, - { headers }, - ) - - expect(res.data.repos.length).toEqual(1) - expect(res.data.repos[0].did).toEqual(term) - }) -}) diff --git a/packages/bsky/tests/views/author-feed.test.ts b/packages/bsky/tests/views/author-feed.test.ts index b8fade87c54..a1496ef07eb 100644 --- a/packages/bsky/tests/views/author-feed.test.ts +++ b/packages/bsky/tests/views/author-feed.test.ts @@ -145,21 +145,10 @@ describe('pds author feed views', () => { expect(preBlock.feed.length).toBeGreaterThan(0) - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 
'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: true, + }) const attempt = agent.api.app.bsky.feed.getAuthorFeed( { actor: alice }, @@ -168,21 +157,10 @@ describe('pds author feed views', () => { await expect(attempt).rejects.toThrow('Profile not found') // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: false, + }) }) it('blocked by record takedown.', async () => { @@ -195,22 +173,10 @@ describe('pds author feed views', () => { const post = preBlock.feed[0].post - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uri, - cid: post.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: post.uri, + takenDown: true, + }) const { data: postBlock } = await agent.api.app.bsky.feed.getAuthorFeed( { actor: alice }, @@ -221,22 +187,10 @@ describe('pds author feed views', () => { expect(postBlock.feed.map((item) => item.post.uri)).not.toContain(post.uri) // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uri, - cid: post.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: 
network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: post.uri, + takenDown: false, + }) }) it('can filter by posts_with_media', async () => { diff --git a/packages/bsky/tests/views/follows.test.ts b/packages/bsky/tests/views/follows.test.ts index 38d08bc5633..819f88e07d2 100644 --- a/packages/bsky/tests/views/follows.test.ts +++ b/packages/bsky/tests/views/follows.test.ts @@ -119,21 +119,10 @@ describe('pds follow views', () => { }) it('blocks followers by actor takedown', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.dan, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: sc.dids.dan, + takenDown: true, + }) const aliceFollowers = await agent.api.app.bsky.graph.getFollowers( { actor: sc.dids.alice }, @@ -144,21 +133,10 @@ describe('pds follow views', () => { sc.dids.dan, ) - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.dan, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: sc.dids.dan, + takenDown: false, + }) }) it('fetches follows', async () => { @@ -251,21 +229,10 @@ describe('pds follow views', () => { }) it('blocks follows by actor takedown', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.dan, - }, - createdBy: 
'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: sc.dids.dan, + takenDown: true, + }) const aliceFollows = await agent.api.app.bsky.graph.getFollows( { actor: sc.dids.alice }, @@ -276,20 +243,9 @@ describe('pds follow views', () => { sc.dids.dan, ) - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: sc.dids.dan, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: sc.dids.dan, + takenDown: false, + }) }) }) diff --git a/packages/bsky/tests/views/list-feed.test.ts b/packages/bsky/tests/views/list-feed.test.ts index b8cd977922b..1ae076b2520 100644 --- a/packages/bsky/tests/views/list-feed.test.ts +++ b/packages/bsky/tests/views/list-feed.test.ts @@ -112,21 +112,10 @@ describe('list feed views', () => { }) it('blocks posts by actor takedown', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: bob, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: bob, + takenDown: true, + }) const res = await agent.api.app.bsky.feed.getListFeed({ list: listRef.uriStr, @@ -135,41 +124,18 @@ describe('list feed views', () => { expect(hasBob).toBe(false) // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 
'com.atproto.admin.defs#repoRef', - did: bob, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: bob, + takenDown: false, + }) }) it('blocks posts by record takedown.', async () => { const postRef = sc.replies[bob][0].ref // Post and reply parent - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: true, + }) const res = await agent.api.app.bsky.feed.getListFeed({ list: listRef.uriStr, @@ -180,21 +146,9 @@ describe('list feed views', () => { expect(hasPost).toBe(false) // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: false, + }) }) }) diff --git a/packages/bsky/tests/views/mute-lists.test.ts b/packages/bsky/tests/views/mute-lists.test.ts index f16d59cd405..7346382c5d2 100644 --- a/packages/bsky/tests/views/mute-lists.test.ts +++ b/packages/bsky/tests/views/mute-lists.test.ts @@ -196,7 +196,6 @@ describe('bsky views with mutes from mute lists', () => { // unfollow so they _would_ show up in suggestions if not for mute await sc.unfollow(dan, carol) await network.processAll() - await 
network.bsky.processAll() const res = await agent.api.app.bsky.actor.getSuggestions( { diff --git a/packages/bsky/tests/views/mutes.test.ts b/packages/bsky/tests/views/mutes.test.ts index 6a00c427124..88e8ce2cdc9 100644 --- a/packages/bsky/tests/views/mutes.test.ts +++ b/packages/bsky/tests/views/mutes.test.ts @@ -225,6 +225,6 @@ describe('mute views', () => { encoding: 'application/json', }, ) - await expect(promise).rejects.toThrow('Cannot mute oneself') + await expect(promise).rejects.toThrow() // @TODO check error message w/ grpc error passthru }) }) diff --git a/packages/bsky/tests/views/notifications.test.ts b/packages/bsky/tests/views/notifications.test.ts index 7449d764671..0e85c3f7613 100644 --- a/packages/bsky/tests/views/notifications.test.ts +++ b/packages/bsky/tests/views/notifications.test.ts @@ -20,7 +20,6 @@ describe('notification views', () => { sc = network.getSeedClient() await basicSeed(sc) await network.processAll() - await network.bsky.processAll() alice = sc.dids.alice }) @@ -73,7 +72,6 @@ describe('notification views', () => { 'indeed', ) await network.processAll() - await network.bsky.processAll() const notifCountAlice = await agent.api.app.bsky.notification.getUnreadCount( @@ -97,7 +95,6 @@ describe('notification views', () => { await sc.deletePost(sc.dids.alice, root.ref.uri) const second = await sc.reply(sc.dids.carol, root.ref, first.ref, 'second') await network.processAll() - await network.bsky.processAll() const notifsAlice = await agent.api.app.bsky.notification.listNotifications( {}, @@ -234,22 +231,10 @@ describe('notification views', () => { const postRef2 = sc.posts[sc.dids.dan][1].ref // Mention await Promise.all( [postRef1, postRef2].map((postRef) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - 
encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: true, + }), ), ) @@ -270,22 +255,10 @@ describe('notification views', () => { // Cleanup await Promise.all( [postRef1, postRef2].map((postRef) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: false, + }), ), ) }) diff --git a/packages/bsky/tests/views/profile.test.ts b/packages/bsky/tests/views/profile.test.ts index 726fb990a0d..03a57373dd1 100644 --- a/packages/bsky/tests/views/profile.test.ts +++ b/packages/bsky/tests/views/profile.test.ts @@ -184,21 +184,11 @@ describe('pds profile views', () => { }) it('blocked by actor takedown', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: true, + }) + const promise = agent.api.app.bsky.actor.getProfile( { actor: alice }, { headers: await network.serviceHeaders(bob) }, @@ -207,21 +197,10 @@ describe('pds profile views', () => { await expect(promise).rejects.toThrow('Account has been taken down') // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 
'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: false, + }) }) async function updateProfile(did: string, record: Record) { diff --git a/packages/bsky/tests/views/suggested-follows.test.ts b/packages/bsky/tests/views/suggested-follows.test.ts index e9aa3248df7..2be077e7dc6 100644 --- a/packages/bsky/tests/views/suggested-follows.test.ts +++ b/packages/bsky/tests/views/suggested-follows.test.ts @@ -17,7 +17,6 @@ describe('suggested follows', () => { sc = network.getSeedClient() await likesSeed(sc) await network.processAll() - await network.bsky.processAll() const suggestions = [ { did: sc.dids.alice, order: 1 }, @@ -27,7 +26,7 @@ describe('suggested follows', () => { { did: sc.dids.fred, order: 5 }, { did: sc.dids.gina, order: 6 }, ] - await network.bsky.ctx.db + await network.bsky.db .getPrimary() .db.insertInto('suggested_follow') .values(suggestions) diff --git a/packages/bsky/tests/views/suggestions.test.ts b/packages/bsky/tests/views/suggestions.test.ts index 4253f528b13..d524a420295 100644 --- a/packages/bsky/tests/views/suggestions.test.ts +++ b/packages/bsky/tests/views/suggestions.test.ts @@ -16,7 +16,6 @@ describe('pds user search views', () => { sc = network.getSeedClient() await basicSeed(sc) await network.processAll() - await network.bsky.processAll() const suggestions = [ { did: sc.dids.alice, order: 1 }, @@ -25,7 +24,7 @@ describe('pds user search views', () => { { did: sc.dids.dan, order: 4 }, ] - await network.bsky.ctx.db + await network.bsky.db .getPrimary() .db.insertInto('suggested_follow') .values(suggestions) diff --git a/packages/bsky/tests/views/thread.test.ts b/packages/bsky/tests/views/thread.test.ts index 0f31c8b8815..2973a571ee9 100644 --- a/packages/bsky/tests/views/thread.test.ts +++ 
b/packages/bsky/tests/views/thread.test.ts @@ -114,7 +114,6 @@ describe('pds thread views', () => { ) indexes.aliceReplyReply = sc.replies[alice].length - 1 await network.processAll() - await network.bsky.processAll() const thread1 = await agent.api.app.bsky.feed.getPostThread( { uri: sc.posts[alice][indexes.aliceRoot].ref.uriStr }, @@ -124,7 +123,6 @@ describe('pds thread views', () => { await sc.deletePost(bob, sc.replies[bob][indexes.bobReply].ref.uri) await network.processAll() - await network.bsky.processAll() const thread2 = await agent.api.app.bsky.feed.getPostThread( { uri: sc.posts[alice][indexes.aliceRoot].ref.uriStr }, @@ -164,21 +162,10 @@ describe('pds thread views', () => { describe('takedown', () => { it('blocks post by actor', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: true, + }) // Same as shallow post thread test, minus alice const promise = agent.api.app.bsky.feed.getPostThread( @@ -191,39 +178,17 @@ describe('pds thread views', () => { ) // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: alice, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: alice, + takenDown: false, + }) }) it('blocks replies by actor', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - 
subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: carol, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: carol, + takenDown: true, + }) // Same as deep post thread test, minus carol const thread = await agent.api.app.bsky.feed.getPostThread( @@ -234,39 +199,17 @@ describe('pds thread views', () => { expect(forSnapshot(thread.data.thread)).toMatchSnapshot() // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: carol, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: carol, + takenDown: false, + }) }) it('blocks ancestors by actor', async () => { - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: bob, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: bob, + takenDown: true, + }) // Same as ancestor post thread test, minus bob const thread = await agent.api.app.bsky.feed.getPostThread( @@ -277,41 +220,18 @@ describe('pds thread views', () => { expect(forSnapshot(thread.data.thread)).toMatchSnapshot() // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: bob, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', 
- headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + actorDid: bob, + takenDown: false, + }) }) it('blocks post by record', async () => { const postRef = sc.posts[alice][1].ref - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: true, + }) const promise = agent.api.app.bsky.feed.getPostThread( { depth: 1, uri: postRef.uriStr }, @@ -323,22 +243,10 @@ describe('pds thread views', () => { ) // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: false, + }) }) it('blocks ancestors by record', async () => { @@ -349,22 +257,10 @@ describe('pds thread views', () => { const parent = threadPreTakedown.data.thread.parent?.['post'] - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: parent.uri, - cid: parent.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: parent.uri, + takenDown: true, + }) // Same as 
ancestor post thread test, minus parent post const thread = await agent.api.app.bsky.feed.getPostThread( @@ -375,22 +271,10 @@ describe('pds thread views', () => { expect(forSnapshot(thread.data.thread)).toMatchSnapshot() // Cleanup - await agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: parent.uri, - cid: parent.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ) + await network.bsky.ctx.dataplane.updateTakedown({ + recordUri: parent.uri, + takenDown: false, + }) }) it('blocks replies by record', async () => { @@ -403,22 +287,10 @@ describe('pds thread views', () => { await Promise.all( [post1, post2].map((post) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uri, - cid: post.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: post.uri, + takenDown: true, + }), ), ) @@ -433,24 +305,10 @@ describe('pds thread views', () => { // Cleanup await Promise.all( [post1, post2].map((post) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { - $type: 'com.atproto.admin.defs#modEventReverseTakedown', - }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: post.uri, - cid: post.cid, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: post.uri, + takenDown: false, + }), ), ) }) diff --git a/packages/bsky/tests/views/timeline.test.ts b/packages/bsky/tests/views/timeline.test.ts 
index 5410d792a1f..93d1190b0eb 100644 --- a/packages/bsky/tests/views/timeline.test.ts +++ b/packages/bsky/tests/views/timeline.test.ts @@ -3,8 +3,10 @@ import AtpAgent from '@atproto/api' import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, getOriginator, paginateAll } from '../_util' import basicSeed from '../seeds/basic' -import { FeedAlgorithm } from '../../src/api/app/bsky/util/feed' import { FeedViewPost } from '../../src/lexicon/types/app/bsky/feed/defs' +import { PrimaryDatabase } from '../../src' + +const REVERSE_CHRON = 'reverse-chronological' describe('timeline views', () => { let network: TestNetwork @@ -32,23 +34,16 @@ describe('timeline views', () => { // Label posts as "kind" to check labels on embed views const labelPostA = sc.posts[bob][0].ref const labelPostB = sc.posts[carol][0].ref - await network.bsky.ctx.services - .label(network.bsky.ctx.db.getPrimary()) - .formatAndCreate( - network.bsky.ctx.cfg.labelerDid, - labelPostA.uriStr, - labelPostA.cidStr, - { create: ['kind'] }, - ) - await network.bsky.ctx.services - .label(network.bsky.ctx.db.getPrimary()) - .formatAndCreate( - network.bsky.ctx.cfg.labelerDid, - labelPostB.uriStr, - labelPostB.cidStr, - { create: ['kind'] }, - ) - await network.bsky.processAll() + await createLabel(network.bsky.db.getPrimary(), { + val: 'kind', + uri: labelPostA.uriStr, + cid: labelPostA.cidStr, + }) + await createLabel(network.bsky.db.getPrimary(), { + val: 'kind', + uri: labelPostB.uriStr, + cid: labelPostB.cidStr, + }) }) afterAll(async () => { @@ -68,7 +63,7 @@ describe('timeline views', () => { } const aliceTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(alice), }, @@ -78,7 +73,7 @@ describe('timeline views', () => { aliceTL.data.feed.forEach(expectOriginatorFollowedBy(alice)) const bobTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: 
FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(bob), }, @@ -88,7 +83,7 @@ describe('timeline views', () => { bobTL.data.feed.forEach(expectOriginatorFollowedBy(bob)) const carolTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(carol), }, @@ -98,7 +93,7 @@ describe('timeline views', () => { carolTL.data.feed.forEach(expectOriginatorFollowedBy(carol)) const danTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(dan), }, @@ -116,7 +111,7 @@ describe('timeline views', () => { }, ) const reverseChronologicalTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(alice), }, @@ -129,7 +124,7 @@ describe('timeline views', () => { const paginator = async (cursor?: string) => { const res = await agent.api.app.bsky.feed.getTimeline( { - algorithm: FeedAlgorithm.ReverseChronological, + algorithm: REVERSE_CHRON, cursor, limit: 4, }, @@ -145,7 +140,7 @@ describe('timeline views', () => { const full = await agent.api.app.bsky.feed.getTimeline( { - algorithm: FeedAlgorithm.ReverseChronological, + algorithm: REVERSE_CHRON, }, { headers: await network.serviceHeaders(carol) }, ) @@ -183,26 +178,15 @@ describe('timeline views', () => { it('blocks posts, reposts, replies by actor takedown', async () => { await Promise.all( [bob, carol].map((did) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - 
), + network.bsky.ctx.dataplane.updateTakedown({ + actorDid: did, + takenDown: true, + }), ), ) const aliceTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(alice) }, ) @@ -211,21 +195,10 @@ describe('timeline views', () => { // Cleanup await Promise.all( [bob, carol].map((did) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + actorDid: did, + takenDown: false, + }), ), ) }) @@ -235,27 +208,15 @@ describe('timeline views', () => { const postRef2 = sc.replies[bob][0].ref // Post and reply parent await Promise.all( [postRef1, postRef2].map((postRef) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: true, + }), ), ) const aliceTL = await agent.api.app.bsky.feed.getTimeline( - { algorithm: FeedAlgorithm.ReverseChronological }, + { algorithm: REVERSE_CHRON }, { headers: await network.serviceHeaders(alice) }, ) @@ -264,23 +225,28 @@ describe('timeline views', () => { // Cleanup await Promise.all( [postRef1, postRef2].map((postRef) => - agent.api.com.atproto.admin.emitModerationEvent( - { - event: { $type: 'com.atproto.admin.defs#modEventReverseTakedown' }, - subject: { - $type: 'com.atproto.repo.strongRef', - uri: 
postRef.uriStr, - cid: postRef.cidStr, - }, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: network.pds.adminAuthHeaders(), - }, - ), + network.bsky.ctx.dataplane.updateTakedown({ + recordUri: postRef.uriStr, + takenDown: false, + }), ), ) }) }) + +const createLabel = async ( + db: PrimaryDatabase, + opts: { uri: string; cid: string; val: string }, +) => { + await db.db + .insertInto('label') + .values({ + uri: opts.uri, + cid: opts.cid, + val: opts.val, + cts: new Date().toISOString(), + neg: false, + src: 'did:example:labeler', + }) + .execute() +} diff --git a/packages/dev-env/src/bsky.ts b/packages/dev-env/src/bsky.ts index 7c29ef4e86f..5f19aae8cc1 100644 --- a/packages/dev-env/src/bsky.ts +++ b/packages/dev-env/src/bsky.ts @@ -1,24 +1,22 @@ -import assert from 'assert' import getPort from 'get-port' import * as ui8 from 'uint8arrays' import * as bsky from '@atproto/bsky' -import { DAY, HOUR, wait } from '@atproto/common-web' +import { DAY, HOUR } from '@atproto/common-web' import { AtpAgent } from '@atproto/api' -import { Secp256k1Keypair, randomIntFromSeed } from '@atproto/crypto' +import { Secp256k1Keypair } from '@atproto/crypto' import { Client as PlcClient } from '@did-plc/lib' import { BskyConfig } from './types' -import { uniqueLockId } from './util' -import { TestNetworkNoAppView } from './network-no-appview' import { ADMIN_PASSWORD, MOD_PASSWORD, TRIAGE_PASSWORD } from './const' +import { BackgroundQueue } from '@atproto/bsky/src/data-plane/server/background' export class TestBsky { constructor( public url: string, public port: number, + public db: bsky.DatabaseCoordinator, public server: bsky.BskyAppView, - public indexer: bsky.BskyIndexer, - public ingester: bsky.BskyIngester, public dataplane: bsky.DataPlaneServer, + public sub: bsky.RepoSubscription, ) {} static async create(cfg: BskyConfig): Promise { @@ -39,7 +37,7 @@ export class TestBsky { const db = new bsky.DatabaseCoordinator({ schema: 
cfg.dbPostgresSchema, primary: { - url: cfg.dbPrimaryPostgresUrl, + url: cfg.dbPostgresUrl, poolSize: 10, }, replicas: [], @@ -52,26 +50,23 @@ export class TestBsky { ) const config = new bsky.ServerConfig({ - version: '0.0.0', + version: 'unknown', port, didPlcUrl: cfg.plcUrl, publicUrl: 'https://bsky.public.url', serverDid, - didCacheStaleTTL: HOUR, - didCacheMaxTTL: DAY, dataplaneUrl: `http://localhost:${dataplanePort}`, ...cfg, // Each test suite gets its own lock id for the repo subscription adminPassword: ADMIN_PASSWORD, moderatorPassword: MOD_PASSWORD, triagePassword: TRIAGE_PASSWORD, - labelerDid: 'did:example:labeler', feedGenDid: 'did:example:feedGen', }) // Separate migration db in case migration changes some connection state that we need in the tests, e.g. "alter database ... set ..." const migrationDb = new bsky.PrimaryDatabase({ - url: cfg.dbPrimaryPostgresUrl, + url: cfg.dbPostgresUrl, schema: cfg.dbPostgresSchema, }) if (cfg.migration) { @@ -81,90 +76,32 @@ export class TestBsky { } await migrationDb.close() + const didCache = new bsky.DidSqlCache(db.getPrimary(), HOUR, DAY) + // api server const server = bsky.BskyAppView.create({ - db, config, - algos: cfg.algos, - imgInvalidator: cfg.imgInvalidator, + didCache, signingKey: serviceKeypair, }) - // indexer - const ns = cfg.dbPostgresSchema - ? 
await randomIntFromSeed(cfg.dbPostgresSchema, 1000000) - : undefined - const indexerCfg = new bsky.IndexerConfig({ - version: '0.0.0', - didCacheStaleTTL: HOUR, - didCacheMaxTTL: DAY, - labelerDid: 'did:example:labeler', - redisHost: cfg.redisHost, - dbPostgresUrl: cfg.dbPrimaryPostgresUrl, - dbPostgresSchema: cfg.dbPostgresSchema, - didPlcUrl: cfg.plcUrl, - labelerKeywords: { label_me: 'test-label', label_me_2: 'test-label-2' }, - abyssEndpoint: '', - abyssPassword: '', - imgUriEndpoint: 'img.example.com', - moderationPushUrl: `http://admin:${config.adminPassword}@localhost:${cfg.pdsPort}`, - indexerPartitionIds: [0], - indexerNamespace: `ns${ns}`, - indexerSubLockId: uniqueLockId(), - indexerPort: await getPort(), - ingesterPartitionCount: 1, - pushNotificationEndpoint: 'https://push.bsky.app/api/push', - ...(cfg.indexer ?? {}), - }) - assert(indexerCfg.redisHost) - const indexerRedis = new bsky.Redis({ - host: indexerCfg.redisHost, - namespace: indexerCfg.indexerNamespace, - }) - const indexer = bsky.BskyIndexer.create({ - cfg: indexerCfg, - db: db.getPrimary(), - redis: indexerRedis, - imgInvalidator: cfg.imgInvalidator, - }) - // ingester - const ingesterCfg = new bsky.IngesterConfig({ - version: '0.0.0', - redisHost: cfg.redisHost, - dbPostgresUrl: cfg.dbPrimaryPostgresUrl, - dbPostgresSchema: cfg.dbPostgresSchema, - repoProvider: cfg.repoProvider, - ingesterNamespace: `ns${ns}`, - ingesterSubLockId: uniqueLockId(), - ingesterPartitionCount: 1, - ...(cfg.ingester ?? 
{}), - }) - assert(ingesterCfg.redisHost) - const ingesterRedis = new bsky.Redis({ - host: ingesterCfg.redisHost, - namespace: ingesterCfg.ingesterNamespace, - }) - const ingester = bsky.BskyIngester.create({ - cfg: ingesterCfg, + + const sub = new bsky.RepoSubscription({ + service: cfg.repoProvider, db: db.getPrimary(), - redis: ingesterRedis, + idResolver: server.ctx.idResolver, + background: new BackgroundQueue(db.getPrimary()), }) - await ingester.start() - await indexer.start() + await server.start() + sub.run() - // we refresh label cache by hand in `processAll` instead of on a timer - server.ctx.labelCache.stop() - return new TestBsky(url, port, server, indexer, ingester, dataplane) + return new TestBsky(url, port, db, server, dataplane, sub) } get ctx(): bsky.AppContext { return this.server.ctx } - get sub() { - return this.indexer.sub - } - getClient() { return new AtpAgent({ service: this.url }) } @@ -188,163 +125,10 @@ export class TestBsky { } } - async processAll() { - await Promise.all([ - this.ctx.backgroundQueue.processAll(), - this.indexer.ctx.backgroundQueue.processAll(), - this.ctx.labelCache.fullRefresh(), - ]) - } - async close() { - await this.server.destroy({ skipDb: true }) + await this.server.destroy() await this.dataplane.destroy() - await this.ingester.destroy({ skipDb: true }) - await this.indexer.destroy() // closes shared db - } -} - -// Below are used for tests just of component parts of the appview, i.e. ingester and indexers: - -export async function getIngester( - network: TestNetworkNoAppView, - opts: { name: string } & Partial, -) { - const { name, ...config } = opts - const ns = name ? 
await randomIntFromSeed(name, 1000000) : undefined - const cfg = new bsky.IngesterConfig({ - version: '0.0.0', - redisHost: process.env.REDIS_HOST || '', - dbPostgresUrl: process.env.DB_POSTGRES_URL || '', - dbPostgresSchema: `appview_${name}`, - repoProvider: network.pds.url.replace('http://', 'ws://'), - ingesterSubLockId: uniqueLockId(), - ingesterPartitionCount: config.ingesterPartitionCount ?? 1, - ingesterNamespace: `ns${ns}`, - ...config, - }) - const db = new bsky.PrimaryDatabase({ - url: cfg.dbPostgresUrl, - schema: cfg.dbPostgresSchema, - }) - assert(cfg.redisHost) - const redis = new bsky.Redis({ - host: cfg.redisHost, - namespace: cfg.ingesterNamespace, - }) - await db.migrateToLatestOrThrow() - return bsky.BskyIngester.create({ cfg, db, redis }) -} - -// get multiple indexers for separate partitions, sharing db and redis instance. -export async function getIndexers( - network: TestNetworkNoAppView, - opts: Partial & { - name: string - partitionIdsByIndexer: number[][] - }, -): Promise { - const { name, ...config } = opts - const ns = name ? await randomIntFromSeed(name, 1000000) : undefined - const baseCfg: bsky.IndexerConfigValues = { - version: '0.0.0', - didCacheStaleTTL: HOUR, - didCacheMaxTTL: DAY, - labelerDid: 'did:example:labeler', - labelerKeywords: { label_me: 'test-label', label_me_2: 'test-label-2' }, - redisHost: process.env.REDIS_HOST || '', - dbPostgresUrl: process.env.DB_POSTGRES_URL || '', - dbPostgresSchema: `appview_${name}`, - didPlcUrl: network.plc.url, - imgUriEndpoint: '', - abyssEndpoint: '', - abyssPassword: '', - indexerPartitionIds: [0], - indexerNamespace: `ns${ns}`, - ingesterPartitionCount: config.ingesterPartitionCount ?? 
1, - ...config, - } - const db = new bsky.PrimaryDatabase({ - url: baseCfg.dbPostgresUrl, - schema: baseCfg.dbPostgresSchema, - }) - assert(baseCfg.redisHost) - const redis = new bsky.Redis({ - host: baseCfg.redisHost, - namespace: baseCfg.indexerNamespace, - }) - const indexers = await Promise.all( - opts.partitionIdsByIndexer.map(async (indexerPartitionIds) => { - const cfg = new bsky.IndexerConfig({ - ...baseCfg, - indexerPartitionIds, - indexerSubLockId: uniqueLockId(), - indexerPort: await getPort(), - }) - return bsky.BskyIndexer.create({ cfg, db, redis }) - }), - ) - await db.migrateToLatestOrThrow() - return { - db, - list: indexers, - async start() { - await Promise.all(indexers.map((indexer) => indexer.start())) - }, - async destroy() { - const stopping = [...indexers] - const lastIndexer = stopping.pop() - await Promise.all( - stopping.map((indexer) => - indexer.destroy({ skipDb: true, skipRedis: true }), - ), - ) - await lastIndexer?.destroy() - }, - } -} - -export type BskyIndexers = { - db: bsky.Database - list: bsky.BskyIndexer[] - start(): Promise - destroy(): Promise -} - -export async function processAll( - network: TestNetworkNoAppView, - ingester: bsky.BskyIngester, -) { - await network.pds.processAll() - await ingestAll(network, ingester) - // eslint-disable-next-line no-constant-condition - while (true) { - // check indexers - const keys = [...Array(ingester.sub.opts.partitionCount)].map( - (_, i) => `repo:${i}`, - ) - const results = await ingester.sub.ctx.redis.streamLengths(keys) - const indexersCaughtUp = results.every((len) => len === 0) - if (indexersCaughtUp) return - await wait(50) - } -} - -export async function ingestAll( - network: TestNetworkNoAppView, - ingester: bsky.BskyIngester, -) { - const sequencer = network.pds.ctx.sequencer - await network.pds.processAll() - // eslint-disable-next-line no-constant-condition - while (true) { - await wait(50) - // check ingester - const [ingesterCursor, curr] = await Promise.all([ - 
ingester.sub.getCursor(), - sequencer.curr(), - ]) - const ingesterCaughtUp = curr !== null && ingesterCursor === curr - if (ingesterCaughtUp) return + await this.sub.destroy() + await this.db.close() } } diff --git a/packages/dev-env/src/network.ts b/packages/dev-env/src/network.ts index 6079ec4c968..ecc46ff35c0 100644 --- a/packages/dev-env/src/network.ts +++ b/packages/dev-env/src/network.ts @@ -38,9 +38,8 @@ export class TestNetwork extends TestNetworkNoAppView { pdsPort, repoProvider: `ws://localhost:${pdsPort}`, dbPostgresSchema: `appview_${dbPostgresSchema}`, - dbPrimaryPostgresUrl: dbPostgresUrl, + dbPostgresUrl, redisHost, - moderationPushUrl: `http://admin:${ADMIN_PASSWORD}@localhost:${pdsPort}`, ...params.bsky, }) const pds = await TestPds.create({ @@ -58,13 +57,12 @@ export class TestNetwork extends TestNetworkNoAppView { } async processFullSubscription(timeout = 5000) { - const sub = this.bsky.indexer.sub + const sub = this.bsky.sub const start = Date.now() const lastSeq = await this.pds.ctx.sequencer.curr() if (!lastSeq) return while (Date.now() - start < timeout) { - const partitionState = sub.partitions.get(0) - if (partitionState?.cursor >= lastSeq) { + if (sub.seenSeq !== null && sub.seenSeq >= lastSeq) { // has seen last seq, just need to wait for it to finish processing await sub.repoQueue.main.onIdle() return @@ -77,7 +75,7 @@ export class TestNetwork extends TestNetworkNoAppView { async processAll(timeout?: number) { await this.pds.processAll() await this.processFullSubscription(timeout) - await this.bsky.processAll() + await this.bsky.sub.background.processAll() } async serviceHeaders(did: string, aud?: string) { diff --git a/packages/dev-env/src/types.ts b/packages/dev-env/src/types.ts index 3bbcaf15257..5a182b66037 100644 --- a/packages/dev-env/src/types.ts +++ b/packages/dev-env/src/types.ts @@ -15,14 +15,12 @@ export type PdsConfig = Partial & { export type BskyConfig = Partial & { plcUrl: string repoProvider: string - 
dbPrimaryPostgresUrl: string + dbPostgresUrl: string + dbPostgresSchema: string redisHost: string pdsPort: number imgInvalidator?: ImageInvalidator migration?: string - algos?: bsky.MountedAlgos - indexer?: Partial - ingester?: Partial } export type TestServerParams = { diff --git a/packages/dev-env/src/util.ts b/packages/dev-env/src/util.ts index 7d6091023f6..2cfbd6dcbd7 100644 --- a/packages/dev-env/src/util.ts +++ b/packages/dev-env/src/util.ts @@ -7,7 +7,6 @@ export const mockNetworkUtilities = (pds: TestPds, bsky?: TestBsky) => { mockResolvers(pds.ctx.idResolver, pds) if (bsky) { mockResolvers(bsky.ctx.idResolver, pds) - mockResolvers(bsky.indexer.ctx.idResolver, pds) } } From 83718796ed9f7fb23110d0aaef5fd721394dabcc Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 14:19:25 -0500 Subject: [PATCH 07/17] cull bsky service entrypoint --- services/bsky/Dockerfile | 1 - services/bsky/api.js | 182 +++------------------------------------ 2 files changed, 11 insertions(+), 172 deletions(-) diff --git a/services/bsky/Dockerfile b/services/bsky/Dockerfile index 9da764ecc3d..24f0a512458 100644 --- a/services/bsky/Dockerfile +++ b/services/bsky/Dockerfile @@ -8,7 +8,6 @@ COPY ./*.* ./ # NOTE bsky's transitive dependencies go here: if that changes, this needs to be updated. 
COPY ./packages/bsky ./packages/bsky COPY ./packages/api ./packages/api -COPY ./packages/aws ./packages/aws COPY ./packages/common ./packages/common COPY ./packages/common-web ./packages/common-web COPY ./packages/crypto ./packages/crypto diff --git a/services/bsky/api.js b/services/bsky/api.js index 2e78d3bafec..38d9c077ab1 100644 --- a/services/bsky/api.js +++ b/services/bsky/api.js @@ -11,196 +11,36 @@ require('dd-trace') // Only works with commonjs }) // Tracer code above must come before anything else -const path = require('path') -const assert = require('assert') -const { - BunnyInvalidator, - CloudfrontInvalidator, - MultiImageInvalidator, -} = require('@atproto/aws') +const path = require('node:path') +const assert = require('node:assert') const { Secp256k1Keypair } = require('@atproto/crypto') -const { - DatabaseCoordinator, - PrimaryDatabase, - ServerConfig, - BskyAppView, - ViewMaintainer, - makeAlgos, - PeriodicModerationEventReversal, -} = require('@atproto/bsky') +const { ServerConfig, BskyAppView, makeAlgos } = require('@atproto/bsky') +const { MemoryCache: MemoryDidCache } = require('@atproto/identity') const main = async () => { const env = getEnv() - assert(env.dbPrimaryPostgresUrl, 'missing configuration for db') - - if (env.enableMigrations) { - // separate db needed for more permissions - const migrateDb = new PrimaryDatabase({ - url: env.dbMigratePostgresUrl, - schema: env.dbPostgresSchema, - poolSize: 2, - }) - await migrateDb.migrateToLatestOrThrow() - await migrateDb.close() - } - - const db = new DatabaseCoordinator({ - schema: env.dbPostgresSchema, - primary: { - url: env.dbPrimaryPostgresUrl, - poolSize: env.dbPrimaryPoolSize || env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - }, - replicas: env.dbReplicaPostgresUrls?.map((url, i) => { - return { - url, - poolSize: env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - tags: 
getTagsForIdx(env.dbReplicaTags, i), - } - }), - }) - const cfg = ServerConfig.readEnv({ - port: env.port, - version: env.version, - dbPrimaryPostgresUrl: env.dbPrimaryPostgresUrl, - dbReplicaPostgresUrls: env.dbReplicaPostgresUrls, - dbReplicaTags: env.dbReplicaTags, - dbPostgresSchema: env.dbPostgresSchema, - publicUrl: env.publicUrl, - didPlcUrl: env.didPlcUrl, - imgUriSalt: env.imgUriSalt, - imgUriKey: env.imgUriKey, - imgUriEndpoint: env.imgUriEndpoint, - blobCacheLocation: env.blobCacheLocation, - }) - + const config = ServerConfig.readEnv() + assert(env.serviceSigningKey, 'must set BSKY_SERVICE_SIGNING_KEY') const signingKey = await Secp256k1Keypair.import(env.serviceSigningKey) - - // configure zero, one, or both image invalidators - let imgInvalidator - const bunnyInvalidator = env.bunnyAccessKey - ? new BunnyInvalidator({ - accessKey: env.bunnyAccessKey, - urlPrefix: cfg.imgUriEndpoint, - }) - : undefined - const cfInvalidator = env.cfDistributionId - ? new CloudfrontInvalidator({ - distributionId: env.cfDistributionId, - pathPrefix: cfg.imgUriEndpoint && new URL(cfg.imgUriEndpoint).pathname, - }) - : undefined - - if (bunnyInvalidator && imgInvalidator) { - imgInvalidator = new MultiImageInvalidator([ - bunnyInvalidator, - imgInvalidator, - ]) - } else if (bunnyInvalidator) { - imgInvalidator = bunnyInvalidator - } else if (cfInvalidator) { - imgInvalidator = cfInvalidator - } - const algos = env.feedPublisherDid ? 
makeAlgos(env.feedPublisherDid) : {} + const didCache = new MemoryDidCache() // @TODO persistent, shared cache const bsky = BskyAppView.create({ - db, + config, signingKey, - config: cfg, - imgInvalidator, + didCache, algos, }) - // separate db needed for more permissions - const viewMaintainerDb = new PrimaryDatabase({ - url: env.dbMigratePostgresUrl, - schema: env.dbPostgresSchema, - poolSize: 2, - }) - const viewMaintainer = new ViewMaintainer(viewMaintainerDb, 1800) - const viewMaintainerRunning = viewMaintainer.run() - - const periodicModerationEventReversal = new PeriodicModerationEventReversal( - bsky.ctx, - ) - const periodicModerationEventReversalRunning = - periodicModerationEventReversal.run() - await bsky.start() - // Graceful shutdown (see also https://aws.amazon.com/blogs/containers/graceful-shutdowns-with-ecs/) process.on('SIGTERM', async () => { - // Gracefully shutdown periodic-moderation-event-reversal before destroying bsky instance - periodicModerationEventReversal.destroy() - await periodicModerationEventReversalRunning await bsky.destroy() - viewMaintainer.destroy() - await viewMaintainerRunning - await viewMaintainerDb.close() }) } const getEnv = () => ({ - enableMigrations: process.env.ENABLE_MIGRATIONS === 'true', - port: parseInt(process.env.PORT), - version: process.env.BSKY_VERSION, - dbMigratePostgresUrl: - process.env.DB_MIGRATE_POSTGRES_URL || process.env.DB_PRIMARY_POSTGRES_URL, - dbPrimaryPostgresUrl: process.env.DB_PRIMARY_POSTGRES_URL, - dbPrimaryPoolSize: maybeParseInt(process.env.DB_PRIMARY_POOL_SIZE), - dbReplicaPostgresUrls: process.env.DB_REPLICA_POSTGRES_URLS - ? process.env.DB_REPLICA_POSTGRES_URLS.split(',') - : undefined, - dbReplicaTags: { - '*': getTagIdxs(process.env.DB_REPLICA_TAGS_ANY), // e.g. 
DB_REPLICA_TAGS_ANY=0,1 - timeline: getTagIdxs(process.env.DB_REPLICA_TAGS_TIMELINE), - feed: getTagIdxs(process.env.DB_REPLICA_TAGS_FEED), - search: getTagIdxs(process.env.DB_REPLICA_TAGS_SEARCH), - thread: getTagIdxs(process.env.DB_REPLICA_TAGS_THREAD), - }, - dbPostgresSchema: process.env.DB_POSTGRES_SCHEMA || undefined, - dbPoolSize: maybeParseInt(process.env.DB_POOL_SIZE), - dbPoolMaxUses: maybeParseInt(process.env.DB_POOL_MAX_USES), - dbPoolIdleTimeoutMs: maybeParseInt(process.env.DB_POOL_IDLE_TIMEOUT_MS), - serviceSigningKey: process.env.SERVICE_SIGNING_KEY, - publicUrl: process.env.PUBLIC_URL, - didPlcUrl: process.env.DID_PLC_URL, - imgUriSalt: process.env.IMG_URI_SALT, - imgUriKey: process.env.IMG_URI_KEY, - imgUriEndpoint: process.env.IMG_URI_ENDPOINT, - blobCacheLocation: process.env.BLOB_CACHE_LOC, - bunnyAccessKey: process.env.BUNNY_ACCESS_KEY, - cfDistributionId: process.env.CF_DISTRIBUTION_ID, - feedPublisherDid: process.env.FEED_PUBLISHER_DID, + serviceSigningKey: process.env.BSKY_SERVICE_SIGNING_KEY || undefined, + feedPublisherDid: process.env.BSKY_FEED_PUBLISHER_DID || undefined, }) -/** - * @param {Record} tags - * @param {number} idx - */ -const getTagsForIdx = (tagMap, idx) => { - const tags = [] - for (const [tag, indexes] of Object.entries(tagMap)) { - if (indexes.includes(idx)) { - tags.push(tag) - } - } - return tags -} - -/** - * @param {string} str - */ -const getTagIdxs = (str) => { - return str ? str.split(',').map((item) => parseInt(item, 10)) : [] -} - -const maybeParseInt = (str) => { - const parsed = parseInt(str) - return isNaN(parsed) ? 
undefined : parsed -} - const maintainXrpcResource = (span, req) => { // Show actual xrpc method as resource rather than the route pattern if (span && req.originalUrl?.startsWith('/xrpc/')) { From 012c754231a29db7c39387cf490250b2b9c6945f Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 14:35:42 -0500 Subject: [PATCH 08/17] add bsky service readme --- services/bsky/README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 services/bsky/README.md diff --git a/services/bsky/README.md b/services/bsky/README.md new file mode 100644 index 00000000000..670165deac1 --- /dev/null +++ b/services/bsky/README.md @@ -0,0 +1,20 @@ +# bsky appview service + +This is the service entrypoint for the bsky appview. The entrypoint command should run `api.js` with node, e.g. `node api.js`. The following env vars are supported: + +- `BSKY_PUBLIC_URL` - (required) the public url of the appview, e.g. `https://api.bsky.app`. +- `BSKY_DID_PLC_URL` - (required) the url of the PLC service used for looking up did documents, e.g. `https://plc.directory`. +- `BSKY_DATAPLANE_URL` - (required) the url where the backing dataplane service lives. +- `BSKY_SERVICE_SIGNING_KEY` - (required) the public signing key in the form of a `did:key`, used for service-to-service auth. Advertised in the appview's `did:web`` document. +- `BSKY_ADMIN_PASSWORD` - (required) the admin password used for role-based auth. +- `NODE_ENV` - (recommended) for production usage, should be set to `production`. Otherwise all responses are validated on their way out. There may be other effects of not setting this to `production`, as dependencies may also implement debug modes based on its value. +- `BSKY_VERSION` - (recommended) version of the bsky service. This is advertised by the health endpoint. +- `BSKY_PORT` - (recommended) the port that the service will run on. +- `BSKY_IMG_URI_ENDPOINT` - (recommended) the base url for resized images, e.g. `https://https://cdn.bsky.app/img`. 
When not set, sets-up an image resizing service directly on the appview. +- `BSKY_SERVER_DID` - (recommended) the did of the appview service. When this is a `did:web` that matches the appview's public url, a `did:web` document is served. +- `BSKY_FEED_PUBLISHER_DID` - indicates the publisher did of any feedgen records which the appview supports. +- `BSKY_FEED_GEN_DID` - the did of the appview's feed generator service. When present the appview implements `app.bsky.feed.describeFeedGenerator`. +- `BSKY_HANDLE_RESOLVE_NAMESERVERS` - alternative domain name servers used for handle resolution, comma-separated. +- `BSKY_BLOB_CACHE_LOC` - when `BSKY_IMG_URI_ENDPOINT` is not set, this determines where resized blobs are cached by the image resizing service. +- `BSKY_MODERATOR_PASSWORD` - the moderator password used for role-based auth. +- `BSKY_TRIAGE_PASSWORD` - the triage password used for role-based auth. From f2edcfd82360d0f49bc1437d721b1789faa55550 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 14:36:04 -0500 Subject: [PATCH 09/17] build --- .github/workflows/build-and-push-bsky-ghcr.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-and-push-bsky-ghcr.yaml b/.github/workflows/build-and-push-bsky-ghcr.yaml index 5d22cd9a389..cd313b5d4bc 100644 --- a/.github/workflows/build-and-push-bsky-ghcr.yaml +++ b/.github/workflows/build-and-push-bsky-ghcr.yaml @@ -3,6 +3,7 @@ on: push: branches: - main + - bav-v2-drop-pg env: REGISTRY: ghcr.io USERNAME: ${{ github.actor }} From a93ae30657660ece96d5d564b39195d7f0769f3e Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 14:51:29 -0500 Subject: [PATCH 10/17] tidy --- packages/dev-env/src/mock/index.ts | 50 +++++++++++++++++------------- services/bsky/README.md | 2 +- 2 files changed, 29 insertions(+), 23 deletions(-) diff --git a/packages/dev-env/src/mock/index.ts b/packages/dev-env/src/mock/index.ts index 10f76b1c259..eec0bb3b38e 100644 --- 
a/packages/dev-env/src/mock/index.ts +++ b/packages/dev-env/src/mock/index.ts @@ -1,5 +1,6 @@ import { AtUri } from '@atproto/syntax' import AtpAgent from '@atproto/api' +import { PrimaryDatabase } from '@atproto/bsky' import { REASONSPAM, REASONOTHER, @@ -186,28 +187,16 @@ export async function generateMockSetup(env: TestNetwork) { }, ) - const ctx = env.bsky.ctx - if (ctx) { - const labelSrvc = ctx.services.label(ctx.db.getPrimary()) - await labelSrvc.createLabels([ - { - src: ctx.cfg.labelerDid, - uri: labeledPost.uri, - cid: labeledPost.cid, - val: 'nudity', - neg: false, - cts: new Date().toISOString(), - }, - { - src: ctx.cfg.labelerDid, - uri: filteredPost.uri, - cid: filteredPost.cid, - val: 'dmca-violation', - neg: false, - cts: new Date().toISOString(), - }, - ]) - } + await createLabel(env.bsky.db.getPrimary(), { + uri: labeledPost.uri, + cid: labeledPost.cid, + val: 'nudity', + }) + await createLabel(env.bsky.db.getPrimary(), { + uri: filteredPost.uri, + cid: filteredPost.cid, + val: 'dmca-violation', + }) // a set of replies for (let i = 0; i < 100; i++) { @@ -341,3 +330,20 @@ export async function generateMockSetup(env: TestNetwork) { function ucfirst(str: string): string { return str.at(0)?.toUpperCase() + str.slice(1) } + +const createLabel = async ( + db: PrimaryDatabase, + opts: { uri: string; cid: string; val: string }, +) => { + await db.db + .insertInto('label') + .values({ + uri: opts.uri, + cid: opts.cid, + val: opts.val, + cts: new Date().toISOString(), + neg: false, + src: 'did:example:labeler', + }) + .execute() +} diff --git a/services/bsky/README.md b/services/bsky/README.md index 670165deac1..c8bbb8ad4d2 100644 --- a/services/bsky/README.md +++ b/services/bsky/README.md @@ -10,7 +10,7 @@ This is the service entrypoint for the bsky appview. The entrypoint command shou - `NODE_ENV` - (recommended) for production usage, should be set to `production`. Otherwise all responses are validated on their way out. 
There may be other effects of not setting this to `production`, as dependencies may also implement debug modes based on its value. - `BSKY_VERSION` - (recommended) version of the bsky service. This is advertised by the health endpoint. - `BSKY_PORT` - (recommended) the port that the service will run on. -- `BSKY_IMG_URI_ENDPOINT` - (recommended) the base url for resized images, e.g. `https://https://cdn.bsky.app/img`. When not set, sets-up an image resizing service directly on the appview. +- `BSKY_IMG_URI_ENDPOINT` - (recommended) the base url for resized images, e.g. `https://cdn.bsky.app/img`. When not set, sets-up an image resizing service directly on the appview. - `BSKY_SERVER_DID` - (recommended) the did of the appview service. When this is a `did:web` that matches the appview's public url, a `did:web` document is served. - `BSKY_FEED_PUBLISHER_DID` - indicates the publisher did of any feedgen records which the appview supports. - `BSKY_FEED_GEN_DID` - the did of the appview's feed generator service. When present the appview implements `app.bsky.feed.describeFeedGenerator`. 
From 9229cc2a0ad571867d85049ae685058edf7e830b Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 15:19:52 -0500 Subject: [PATCH 11/17] tidy, fix pds proxy tests --- packages/bsky/src/index.ts | 1 - packages/dev-env/src/bsky.ts | 2 +- packages/dev-env/src/types.ts | 3 +- .../__snapshots__/feedgen.test.ts.snap | 954 +----------------- .../proxied/__snapshots__/views.test.ts.snap | 180 +--- packages/pds/tests/proxied/admin.test.ts | 5 +- packages/pds/tests/proxied/feedgen.test.ts | 10 +- packages/pds/tests/proxied/procedures.test.ts | 2 +- .../tests/proxied/read-after-write.test.ts | 2 +- packages/pds/tests/proxied/views.test.ts | 4 +- packages/pds/tests/seeds/basic.ts | 41 +- 11 files changed, 47 insertions(+), 1157 deletions(-) diff --git a/packages/bsky/src/index.ts b/packages/bsky/src/index.ts index b0967cfaee9..df8476f7f36 100644 --- a/packages/bsky/src/index.ts +++ b/packages/bsky/src/index.ts @@ -38,7 +38,6 @@ export class BskyAppView { public app: express.Application public server?: http.Server private terminator?: HttpTerminator - private dbStatsInterval: NodeJS.Timer constructor(opts: { ctx: AppContext; app: express.Application }) { this.ctx = opts.ctx diff --git a/packages/dev-env/src/bsky.ts b/packages/dev-env/src/bsky.ts index 5f19aae8cc1..3b726f2a096 100644 --- a/packages/dev-env/src/bsky.ts +++ b/packages/dev-env/src/bsky.ts @@ -57,7 +57,6 @@ export class TestBsky { serverDid, dataplaneUrl: `http://localhost:${dataplanePort}`, ...cfg, - // Each test suite gets its own lock id for the repo subscription adminPassword: ADMIN_PASSWORD, moderatorPassword: MOD_PASSWORD, triagePassword: TRIAGE_PASSWORD, @@ -83,6 +82,7 @@ export class TestBsky { config, didCache, signingKey: serviceKeypair, + algos: cfg.algos, }) const sub = new bsky.RepoSubscription({ diff --git a/packages/dev-env/src/types.ts b/packages/dev-env/src/types.ts index 5a182b66037..6cbe81ec6f5 100644 --- a/packages/dev-env/src/types.ts +++ b/packages/dev-env/src/types.ts @@ -1,6 +1,5 
@@ import * as pds from '@atproto/pds' import * as bsky from '@atproto/bsky' -import { ImageInvalidator } from '@atproto/bsky/src/image/invalidator' export type PlcConfig = { port?: number @@ -19,8 +18,8 @@ export type BskyConfig = Partial & { dbPostgresSchema: string redisHost: string pdsPort: number - imgInvalidator?: ImageInvalidator migration?: string + algos?: bsky.MountedAlgos } export type TestServerParams = { diff --git a/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap b/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap index 107ae5667be..01002de0be5 100644 --- a/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap +++ b/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap @@ -2,958 +2,6 @@ exports[`feedgen proxy view performs basic proxy of getFeed 1`] = ` Object { - "cursor": "0000000000000::bafycid", - "feed": Array [ - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(0)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "reply": Object { - "parent": Object { - "cid": "cids(4)", - "uri": "record(3)", - }, - "root": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - }, - "text": "thanks bob", - }, - "replyCount": 0, - "repostCount": 1, - "uri": "record(0)", - "viewer": Object {}, - }, - "reply": Object { - "parent": 
Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(3)/cids(1)@jpeg", - "did": "user(2)", - "displayName": "bobby", - "handle": "bob.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", - "muted": false, - }, - }, - "cid": "cids(4)", - "embed": Object { - "$type": "app.bsky.embed.images#view", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", - }, - ], - }, - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.images", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(5)", - }, - "size": 4114, - }, - }, - ], - }, - "reply": Object { - "parent": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - "root": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - }, - "text": "hear that label_me label_me_2", - }, - "replyCount": 1, - "repostCount": 0, - "uri": "record(3)", - "viewer": Object {}, - }, - "root": Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", 
- "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - }, - }, - Object { - "post": Object { - "author": Object { - "did": "user(4)", - "handle": "carol.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", - "muted": false, - }, - }, - "cid": "cids(6)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "reply": Object { - "parent": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - "root": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - }, - "text": "of course", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(6)", - "viewer": Object {}, - }, - "reply": Object { - "parent": Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": 
"1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - "root": Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(3)/cids(1)@jpeg", - "did": "user(2)", - "displayName": "bobby", - "handle": "bob.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", - "muted": false, - }, - }, - "cid": "cids(4)", - "embed": Object { - "$type": "app.bsky.embed.images#view", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "fullsize": 
"https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", - }, - ], - }, - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.images", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(5)", - }, - "size": 4114, - }, - }, - ], - }, - "reply": Object { - "parent": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - "root": Object { - "cid": "cids(3)", - "uri": "record(2)", - }, - }, - "text": "hear that label_me label_me_2", - }, - "replyCount": 1, - "repostCount": 0, - "uri": "record(3)", - "viewer": Object {}, - }, - "reply": Object { - "parent": Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - 
"labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - "root": Object { - "$type": "app.bsky.feed.defs#postView", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(7)", - "embed": Object { - "$type": "app.bsky.embed.record#view", - "record": Object { - "$type": "app.bsky.embed.record#viewRecord", - "author": Object { - "did": 
"user(5)", - "handle": "dan.test", - "labels": Array [ - Object { - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "user(5)", - "val": "repo-action-label", - }, - ], - "viewer": Object { - "blockedBy": false, - "following": "record(11)", - "muted": false, - }, - }, - "cid": "cids(8)", - "embeds": Array [ - Object { - "$type": "app.bsky.embed.record#view", - "record": Object { - "$type": "app.bsky.embed.record#viewRecord", - "author": Object { - "did": "user(4)", - "handle": "carol.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", - "muted": false, - }, - }, - "cid": "cids(9)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "uri": "record(12)", - "value": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.recordWithMedia", - "media": Object { - "$type": "app.bsky.embed.images", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(5)", - }, - "size": 4114, - }, - }, - Object { - "alt": "tests/sample-img/key-alt.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(10)", - }, - "size": 12736, - }, - }, - ], - }, - "record": Object { - "record": Object { - "cid": "cids(11)", - "uri": "record(13)", - }, - }, - }, - "text": "hi im carol", - }, - }, - }, - ], - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "uri": "record(10)", - "value": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.record", - "record": Object { - "cid": "cids(9)", - "uri": "record(12)", - }, - }, - "facets": Array [ - Object { - "features": Array [ - Object { - "$type": 
"app.bsky.richtext.facet#mention", - "did": "user(0)", - }, - ], - "index": Object { - "byteEnd": 18, - "byteStart": 0, - }, - }, - ], - "text": "@alice.bluesky.xyz is the best", - }, - }, - }, - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(7)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(9)", - "val": "test-label", - }, - ], - "likeCount": 2, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.record", - "record": Object { - "cid": "cids(8)", - "uri": "record(10)", - }, - }, - "text": "yoohoo label_me", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(9)", - "viewer": Object {}, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(3)/cids(1)@jpeg", - "did": "user(2)", - "displayName": "bobby", - "handle": "bob.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", - "muted": false, - }, - }, - "cid": "cids(12)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000+00:00", - "text": "bobby boy here", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(14)", - "viewer": Object {}, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": 
"self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(3)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 3, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000000Z", - "text": "again", - }, - "replyCount": 2, - "repostCount": 1, - "uri": "record(2)", - "viewer": Object {}, - }, - }, - Object { - "post": Object { - "author": Object { - "did": "user(4)", - "handle": "carol.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", - "muted": false, - }, - }, - "cid": "cids(9)", - "embed": Object { - "$type": "app.bsky.embed.recordWithMedia#view", - "media": Object { - "$type": "app.bsky.embed.images#view", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", - }, - Object { - "alt": "tests/sample-img/key-alt.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(10)@jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(10)@jpeg", - }, - ], - }, - "record": Object { - "record": Object { - "$type": "app.bsky.embed.record#viewRecord", - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(3)/cids(1)@jpeg", - "did": "user(2)", - "displayName": "bobby", - "handle": "bob.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", - "muted": false, - }, - }, - "cid": "cids(11)", - "embeds": Array [], - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "uri": "record(13)", - "value": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "langs": Array [ - "en-US", - "i-klingon", - ], - 
"text": "bob back at it again!", - }, - }, - }, - }, - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 2, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "embed": Object { - "$type": "app.bsky.embed.recordWithMedia", - "media": Object { - "$type": "app.bsky.embed.images", - "images": Array [ - Object { - "alt": "tests/sample-img/key-landscape-small.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(5)", - }, - "size": 4114, - }, - }, - Object { - "alt": "tests/sample-img/key-alt.jpg", - "image": Object { - "$type": "blob", - "mimeType": "image/jpeg", - "ref": Object { - "$link": "cids(10)", - }, - "size": 12736, - }, - }, - ], - }, - "record": Object { - "record": Object { - "cid": "cids(11)", - "uri": "record(13)", - }, - }, - }, - "text": "hi im carol", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(12)", - "viewer": Object { - "like": "record(15)", - }, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(3)/cids(1)@jpeg", - "did": "user(2)", - "displayName": "bobby", - "handle": "bob.test", - "labels": Array [], - "viewer": Object { - "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", - "muted": false, - }, - }, - "cid": "cids(11)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "langs": Array [ - "en-US", - "i-klingon", - ], - "text": "bob back at it again!", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(13)", - "viewer": Object {}, - }, - }, - Object { - "post": Object { - "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "labels": Array [ - Object { - 
"cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], - "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "cid": "cids(13)", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(13)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(16)", - "val": "self-label", - }, - ], - "likeCount": 0, - "record": Object { - "$type": "app.bsky.feed.post", - "createdAt": "1970-01-01T00:00:00.000Z", - "labels": Object { - "$type": "com.atproto.label.defs#selfLabels", - "values": Array [ - Object { - "val": "self-label", - }, - ], - }, - "text": "hey there", - }, - "replyCount": 0, - "repostCount": 0, - "uri": "record(16)", - "viewer": Object {}, - }, - }, - ], + "feed": Array [], } `; diff --git a/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap b/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap index 7d7d4325dcf..a0a3b0c860c 100644 --- a/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap +++ b/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap @@ -303,24 +303,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label", - }, - Object { - "cid": "cids(0)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(0)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -757,24 +740,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": 
"1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -885,24 +851,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1181,16 +1130,7 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(6)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(6)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -1666,24 +1606,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2100,24 +2023,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": 
"did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2356,24 +2262,7 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label", - }, - Object { - "cid": "cids(4)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(3)", - "val": "test-label-2", - }, - ], + "labels": Array [], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2652,16 +2541,7 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(11)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "did:example:labeler", - "uri": "record(13)", - "val": "test-label", - }, - ], + "labels": Array [], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -3519,48 +3399,6 @@ Object { exports[`proxies view requests unspecced.getPopularFeedGenerators 1`] = ` Object { - "cursor": "0000000000000::bafycid", - "feeds": Array [ - Object { - "cid": "cids(0)", - "creator": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", - "description": "its me!", - "did": "user(0)", - "displayName": "ali", - "handle": "alice.test", - "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [ - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-a", - }, - Object { - "cid": "cids(2)", - "cts": "1970-01-01T00:00:00.000Z", - "neg": false, - "src": "user(0)", - "uri": "record(1)", - "val": "self-label-b", - }, - ], 
- "viewer": Object { - "blockedBy": false, - "muted": false, - }, - }, - "description": "Provides all feed candidates", - "did": "did:example:feedgen", - "displayName": "All", - "indexedAt": "1970-01-01T00:00:00.000Z", - "likeCount": 0, - "uri": "record(0)", - "viewer": Object {}, - }, - ], + "feeds": Array [], } `; diff --git a/packages/pds/tests/proxied/admin.test.ts b/packages/pds/tests/proxied/admin.test.ts index fd8538e802a..c4d75ff8b2b 100644 --- a/packages/pds/tests/proxied/admin.test.ts +++ b/packages/pds/tests/proxied/admin.test.ts @@ -8,7 +8,8 @@ import { import { forSnapshot } from '../_util' import { NotFoundError } from '@atproto/api/src/client/types/app/bsky/feed/getPostThread' -describe('proxies admin requests', () => { +// @TODO skipping during appview v2 buildout, as appview frontends no longer contains moderation endpoints +describe.skip('proxies admin requests', () => { let network: TestNetwork let agent: AtpAgent let sc: SeedClient @@ -34,7 +35,7 @@ describe('proxies admin requests', () => { ) await basicSeed(sc, { inviteCode: invite.code, - addModLabels: true, + addModLabels: network.bsky, }) await network.processAll() }) diff --git a/packages/pds/tests/proxied/feedgen.test.ts b/packages/pds/tests/proxied/feedgen.test.ts index 6f06ce0d020..32e03256a7c 100644 --- a/packages/pds/tests/proxied/feedgen.test.ts +++ b/packages/pds/tests/proxied/feedgen.test.ts @@ -1,4 +1,3 @@ -import { makeAlgos } from '@atproto/bsky' import AtpAgent, { AtUri, FeedNS } from '@atproto/api' import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' @@ -19,11 +18,16 @@ describe('feedgen proxy view', () => { beforeAll(async () => { network = await TestNetwork.create({ dbPostgresSchema: 'proxy_feedgen', - bsky: { algos: makeAlgos(feedUri.host) }, + bsky: { + // @TODO consider using makeAlgos() here if the appview begins supporting any feeds out of the box + algos: { + [feedUri.toString()]: async () => ({ feedItems: [] }), + }, + }, 
}) agent = network.pds.getClient() sc = network.getSeedClient() - await basicSeed(sc, { addModLabels: true }) + await basicSeed(sc, { addModLabels: network.bsky }) // publish feed const feed = await agent.api.app.bsky.feed.generator.create( { repo: sc.dids.alice, rkey: feedUri.rkey }, diff --git a/packages/pds/tests/proxied/procedures.test.ts b/packages/pds/tests/proxied/procedures.test.ts index 8c246e38da7..3aeb7d98d7e 100644 --- a/packages/pds/tests/proxied/procedures.test.ts +++ b/packages/pds/tests/proxied/procedures.test.ts @@ -17,7 +17,7 @@ describe('proxies appview procedures', () => { }) agent = network.pds.getClient() sc = network.getSeedClient() - await basicSeed(sc, { addModLabels: true }) + await basicSeed(sc, { addModLabels: network.bsky }) await network.processAll() alice = sc.dids.alice bob = sc.dids.bob diff --git a/packages/pds/tests/proxied/read-after-write.test.ts b/packages/pds/tests/proxied/read-after-write.test.ts index 52507a2730a..df7629ee778 100644 --- a/packages/pds/tests/proxied/read-after-write.test.ts +++ b/packages/pds/tests/proxied/read-after-write.test.ts @@ -21,7 +21,7 @@ describe('proxy read after write', () => { }) agent = network.pds.getClient() sc = network.getSeedClient() - await basicSeed(sc, { addModLabels: true }) + await basicSeed(sc, { addModLabels: network.bsky }) await network.processAll() alice = sc.dids.alice carol = sc.dids.carol diff --git a/packages/pds/tests/proxied/views.test.ts b/packages/pds/tests/proxied/views.test.ts index 94b76719d70..b0fd3e6e17a 100644 --- a/packages/pds/tests/proxied/views.test.ts +++ b/packages/pds/tests/proxied/views.test.ts @@ -19,7 +19,7 @@ describe('proxies view requests', () => { }) agent = network.pds.getClient() sc = network.getSeedClient() - await basicSeed(sc, { addModLabels: true }) + await basicSeed(sc, { addModLabels: network.bsky }) alice = sc.dids.alice bob = sc.dids.bob carol = sc.dids.carol @@ -79,7 +79,7 @@ describe('proxies view requests', () => { { did: sc.dids.carol, 
order: 2 }, { did: sc.dids.dan, order: 3 }, ] - await network.bsky.ctx.db + await network.bsky.db .getPrimary() .db.insertInto('suggested_follow') .values(suggestions) diff --git a/packages/pds/tests/seeds/basic.ts b/packages/pds/tests/seeds/basic.ts index 1085e2b381e..31195c5b9ba 100644 --- a/packages/pds/tests/seeds/basic.ts +++ b/packages/pds/tests/seeds/basic.ts @@ -1,10 +1,10 @@ -import { SeedClient } from '@atproto/dev-env' +import { SeedClient, TestBsky } from '@atproto/dev-env' import { ids } from '../../src/lexicon/lexicons' import usersSeed from './users' export default async ( sc: SeedClient, - opts?: { inviteCode?: string; addModLabels?: boolean }, + opts?: { inviteCode?: string; addModLabels?: TestBsky }, ) => { await usersSeed(sc, opts) @@ -134,24 +134,7 @@ export default async ( await sc.repost(dan, alicesReplyToBob.ref) if (opts?.addModLabels) { - await sc.agent.com.atproto.admin.emitModerationEvent( - { - event: { - createLabelVals: ['repo-action-label'], - negateLabelVals: [], - $type: 'com.atproto.admin.defs#modEventLabel', - }, - subject: { - $type: 'com.atproto.admin.defs#repoRef', - did: dan, - }, - createdBy: 'did:example:admin', - }, - { - encoding: 'application/json', - headers: sc.adminAuthHeaders(), - }, - ) + await createLabel(opts.addModLabels, { did: dan, val: 'repo-action-label' }) } return sc @@ -169,3 +152,21 @@ export const replies = { bob: ['hear that label_me label_me_2'], carol: ['of course'], } + +const createLabel = async ( + bsky: TestBsky, + opts: { did: string; val: string }, +) => { + await bsky.db + .getPrimary() + .db.insertInto('label') + .values({ + uri: opts.did, + cid: '', + val: opts.val, + cts: new Date().toISOString(), + neg: false, + src: 'did:example:labeler', + }) + .execute() +} From 3b85cbfe1ecebd08152f430659cf98926c4a76d8 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 19:04:40 -0500 Subject: [PATCH 12/17] fix --- services/bsky/api.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/services/bsky/api.js b/services/bsky/api.js index 38d9c077ab1..df32571c67d 100644 --- a/services/bsky/api.js +++ b/services/bsky/api.js @@ -11,7 +11,7 @@ require('dd-trace') // Only works with commonjs }) // Tracer code above must come before anything else -const path = require('nose:path') +const path = require('node:path') const assert = require('node:assert') const { Secp256k1Keypair } = require('@atproto/crypto') const { ServerConfig, BskyAppView, makeAlgos } = require('@atproto/bsky') From 3b9a5cb2501a42fb1e508a3ec75d30ab94110869 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Thu, 21 Dec 2023 19:12:40 -0500 Subject: [PATCH 13/17] fix bsky entrypoint deps --- pnpm-lock.yaml | 39 ++++++++++++-------------------------- services/bsky/package.json | 3 ++- 2 files changed, 14 insertions(+), 28 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1fc12a3e242..97c9d2b8bbe 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -422,7 +422,7 @@ importers: devDependencies: ts-node: specifier: ^10.8.1 - version: 10.8.2(@swc/core@1.3.42)(@types/node@20.10.4)(typescript@5.3.3) + version: 10.8.2(@swc/core@1.3.42)(@types/node@18.17.8)(typescript@5.3.3) packages/identity: dependencies: @@ -774,12 +774,15 @@ importers: services/bsky: dependencies: - '@atproto/aws': - specifier: workspace:^ - version: link:../../packages/aws '@atproto/bsky': specifier: workspace:^ version: link:../../packages/bsky + '@atproto/crypto': + specifier: workspace:^ + version: link:../../packages/crypto + '@atproto/identity': + specifier: workspace:^ + version: link:../../packages/identity dd-trace: specifier: 3.13.2 version: 3.13.2 @@ -803,7 +806,7 @@ importers: version: 4.20.0 opentelemetry-plugin-better-sqlite3: specifier: ^1.1.0 - version: 1.1.0(better-sqlite3@9.2.2) + version: 1.1.0(better-sqlite3@7.6.2) packages: @@ -5761,12 +5764,6 @@ packages: /@types/node@18.17.8: resolution: {integrity: 
sha512-Av/7MqX/iNKwT9Tr60V85NqMnsmh8ilfJoBlIVibkXfitk9Q22D9Y5mSpm+FvG5DET7EbVfB40bOiLzKgYFgPw==} - /@types/node@20.10.4: - resolution: {integrity: sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==} - dependencies: - undici-types: 5.26.5 - dev: true - /@types/nodemailer@6.4.6: resolution: {integrity: sha512-pD6fL5GQtUKvD2WnPmg5bC2e8kWCAPDwMPmHe/ohQbW+Dy0EcHgZ2oCSuPlWNqk74LS5BVMig1SymQbFMPPK3w==} dependencies: @@ -6378,14 +6375,6 @@ packages: bindings: 1.5.0 prebuild-install: 7.1.1 - /better-sqlite3@9.2.2: - resolution: {integrity: sha512-qwjWB46il0lsDkeB4rSRI96HyDQr8sxeu1MkBVLMrwusq1KRu4Bpt1TMI+8zIJkDUtZ3umjAkaEjIlokZKWCQw==} - requiresBuild: true - dependencies: - bindings: 1.5.0 - prebuild-install: 7.1.1 - dev: false - /big-integer@1.6.51: resolution: {integrity: sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==} engines: {node: '>=0.6'} @@ -9828,7 +9817,7 @@ packages: mimic-fn: 2.1.0 dev: true - /opentelemetry-plugin-better-sqlite3@1.1.0(better-sqlite3@9.2.2): + /opentelemetry-plugin-better-sqlite3@1.1.0(better-sqlite3@7.6.2): resolution: {integrity: sha512-yd+mgaB5W5JxzcQt9TvX1VIrusqtbbeuxSoZ6KQe4Ra0J/Kqkp6kz7dg0VQUU5+cenOWkza6xtvsT0KGXI03HA==} peerDependencies: better-sqlite3: ^7.1.1 || ^8.0.0 || ^9.0.0 @@ -9837,7 +9826,7 @@ packages: '@opentelemetry/core': 1.18.1(@opentelemetry/api@1.7.0) '@opentelemetry/instrumentation': 0.44.0(@opentelemetry/api@1.7.0) '@opentelemetry/semantic-conventions': 1.18.1 - better-sqlite3: 9.2.2 + better-sqlite3: 7.6.2 transitivePeerDependencies: - supports-color dev: false @@ -11249,7 +11238,7 @@ packages: yn: 3.1.1 dev: true - /ts-node@10.8.2(@swc/core@1.3.42)(@types/node@20.10.4)(typescript@5.3.3): + /ts-node@10.8.2(@swc/core@1.3.42)(@types/node@18.17.8)(typescript@5.3.3): resolution: {integrity: sha512-LYdGnoGddf1D6v8REPtIH+5iq/gTDuZqv2/UJUU7tKjuEU8xVZorBM+buCGNjj+pGEud+sOoM4CX3/YzINpENA==} hasBin: true peerDependencies: @@ -11269,7 
+11258,7 @@ packages: '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 20.10.4 + '@types/node': 18.17.8 acorn: 8.10.0 acorn-walk: 8.2.0 arg: 4.1.3 @@ -11436,10 +11425,6 @@ packages: which-boxed-primitive: 1.0.2 dev: true - /undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - dev: true - /undici@5.28.2: resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==} engines: {node: '>=14.0'} diff --git a/services/bsky/package.json b/services/bsky/package.json index 65de10674dc..2c4da882b62 100644 --- a/services/bsky/package.json +++ b/services/bsky/package.json @@ -2,8 +2,9 @@ "name": "bsky-app-view-service", "private": true, "dependencies": { - "@atproto/aws": "workspace:^", "@atproto/bsky": "workspace:^", + "@atproto/crypto": "workspace:^", + "@atproto/identity": "workspace:^", "dd-trace": "3.13.2" } } From 1da49ccb9d70ac68f7f381eaf150c2583d1ebaf7 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Fri, 29 Dec 2023 10:19:25 -0500 Subject: [PATCH 14/17] move dataplane mock tests into their own dir --- .../__snapshots__/indexing.test.ts.snap | 0 packages/bsky/tests/{ => data-plane}/db.test.ts | 2 +- packages/bsky/tests/{ => data-plane}/did-cache.test.ts | 4 ++-- .../tests/{ => data-plane}/duplicate-records.test.ts | 4 ++-- .../tests/{ => data-plane}/handle-invalidation.test.ts | 2 +- packages/bsky/tests/{ => data-plane}/indexing.test.ts | 10 +++++----- .../tests/{ => data-plane}/subscription/repo.test.ts | 10 +++++----- .../tests/{ => data-plane}/subscription/util.test.ts | 4 ++-- 8 files changed, 18 insertions(+), 18 deletions(-) rename packages/bsky/tests/{ => data-plane}/__snapshots__/indexing.test.ts.snap (100%) rename packages/bsky/tests/{ => data-plane}/db.test.ts (98%) rename packages/bsky/tests/{ => data-plane}/did-cache.test.ts (98%) rename packages/bsky/tests/{ => 
data-plane}/duplicate-records.test.ts (97%) rename packages/bsky/tests/{ => data-plane}/handle-invalidation.test.ts (98%) rename packages/bsky/tests/{ => data-plane}/indexing.test.ts (99%) rename packages/bsky/tests/{ => data-plane}/subscription/repo.test.ts (94%) rename packages/bsky/tests/{ => data-plane}/subscription/util.test.ts (98%) diff --git a/packages/bsky/tests/__snapshots__/indexing.test.ts.snap b/packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap similarity index 100% rename from packages/bsky/tests/__snapshots__/indexing.test.ts.snap rename to packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap diff --git a/packages/bsky/tests/db.test.ts b/packages/bsky/tests/data-plane/db.test.ts similarity index 98% rename from packages/bsky/tests/db.test.ts rename to packages/bsky/tests/data-plane/db.test.ts index 4b1e5ad507e..1b0787828ae 100644 --- a/packages/bsky/tests/db.test.ts +++ b/packages/bsky/tests/data-plane/db.test.ts @@ -1,7 +1,7 @@ import { sql } from 'kysely' import { wait } from '@atproto/common' import { TestNetwork } from '@atproto/dev-env' -import { Database, PrimaryDatabase } from '../src' +import { Database, PrimaryDatabase } from '../../src' describe('db', () => { let network: TestNetwork diff --git a/packages/bsky/tests/did-cache.test.ts b/packages/bsky/tests/data-plane/did-cache.test.ts similarity index 98% rename from packages/bsky/tests/did-cache.test.ts rename to packages/bsky/tests/data-plane/did-cache.test.ts index d140b529e9e..e5192ed3d97 100644 --- a/packages/bsky/tests/did-cache.test.ts +++ b/packages/bsky/tests/data-plane/did-cache.test.ts @@ -1,8 +1,8 @@ import { wait } from '@atproto/common' import { IdResolver } from '@atproto/identity' import { TestNetwork, SeedClient } from '@atproto/dev-env' -import userSeed from './seeds/users' -import { DidSqlCache } from '../src' +import userSeed from '../seeds/users' +import { DidSqlCache } from '../../src' describe('did cache', () => { let network: TestNetwork 
diff --git a/packages/bsky/tests/duplicate-records.test.ts b/packages/bsky/tests/data-plane/duplicate-records.test.ts similarity index 97% rename from packages/bsky/tests/duplicate-records.test.ts rename to packages/bsky/tests/data-plane/duplicate-records.test.ts index bd6ed269227..d9570d03b37 100644 --- a/packages/bsky/tests/duplicate-records.test.ts +++ b/packages/bsky/tests/data-plane/duplicate-records.test.ts @@ -2,8 +2,8 @@ import { AtUri } from '@atproto/syntax' import { cidForCbor, TID } from '@atproto/common' import { WriteOpAction } from '@atproto/repo' import { TestNetwork } from '@atproto/dev-env' -import * as lex from '../src/lexicon/lexicons' -import { Database, PrimaryDatabase } from '../src' +import * as lex from '../../src/lexicon/lexicons' +import { Database, PrimaryDatabase } from '../../src' describe('duplicate record', () => { let network: TestNetwork diff --git a/packages/bsky/tests/handle-invalidation.test.ts b/packages/bsky/tests/data-plane/handle-invalidation.test.ts similarity index 98% rename from packages/bsky/tests/handle-invalidation.test.ts rename to packages/bsky/tests/data-plane/handle-invalidation.test.ts index 3919baf899d..fa20a5f02bd 100644 --- a/packages/bsky/tests/handle-invalidation.test.ts +++ b/packages/bsky/tests/data-plane/handle-invalidation.test.ts @@ -1,7 +1,7 @@ import { DAY } from '@atproto/common' import { TestNetwork, SeedClient } from '@atproto/dev-env' import { AtpAgent } from '@atproto/api' -import userSeed from './seeds/users' +import userSeed from '../seeds/users' describe('handle invalidation', () => { let network: TestNetwork diff --git a/packages/bsky/tests/indexing.test.ts b/packages/bsky/tests/data-plane/indexing.test.ts similarity index 99% rename from packages/bsky/tests/indexing.test.ts rename to packages/bsky/tests/data-plane/indexing.test.ts index 6c86beba966..e141491dd34 100644 --- a/packages/bsky/tests/indexing.test.ts +++ b/packages/bsky/tests/data-plane/indexing.test.ts @@ -12,11 +12,11 @@ import 
AtpAgent, { AppBskyGraphFollow, } from '@atproto/api' import { TestNetwork, SeedClient } from '@atproto/dev-env' -import { forSnapshot } from './_util' -import usersSeed from './seeds/users' -import basicSeed from './seeds/basic' -import { ids } from '../src/lexicon/lexicons' -import { Database } from '../src/data-plane/server/db' +import { forSnapshot } from '../_util' +import usersSeed from '../seeds/users' +import basicSeed from '../seeds/basic' +import { ids } from '../../src/lexicon/lexicons' +import { Database } from '../../src/data-plane/server/db' describe('indexing', () => { let network: TestNetwork diff --git a/packages/bsky/tests/subscription/repo.test.ts b/packages/bsky/tests/data-plane/subscription/repo.test.ts similarity index 94% rename from packages/bsky/tests/subscription/repo.test.ts rename to packages/bsky/tests/data-plane/subscription/repo.test.ts index 5b03afb4b2c..a80ab92aa33 100644 --- a/packages/bsky/tests/subscription/repo.test.ts +++ b/packages/bsky/tests/data-plane/subscription/repo.test.ts @@ -4,11 +4,11 @@ import { CommitData } from '@atproto/repo' import { PreparedWrite } from '@atproto/pds/src/repo' import * as sequencer from '@atproto/pds/src/sequencer' import { cborDecode, cborEncode } from '@atproto/common' -import { DatabaseSchemaType } from '../../src/data-plane/server/db/database-schema' -import { ids } from '../../src/lexicon/lexicons' -import { forSnapshot } from '../_util' -import { Database } from '../../src' -import basicSeed from '../seeds/basic' +import { DatabaseSchemaType } from '../../../src/data-plane/server/db/database-schema' +import { ids } from '../../../src/lexicon/lexicons' +import { forSnapshot } from '../../_util' +import { Database } from '../../../src' +import basicSeed from '../../seeds/basic' describe('sync', () => { let network: TestNetwork diff --git a/packages/bsky/tests/subscription/util.test.ts b/packages/bsky/tests/data-plane/subscription/util.test.ts similarity index 98% rename from 
packages/bsky/tests/subscription/util.test.ts rename to packages/bsky/tests/data-plane/subscription/util.test.ts index d0babb5c3dc..0aba097c334 100644 --- a/packages/bsky/tests/subscription/util.test.ts +++ b/packages/bsky/tests/data-plane/subscription/util.test.ts @@ -3,8 +3,8 @@ import { ConsecutiveList, LatestQueue, PartitionedQueue, -} from '../../src/data-plane/server/subscription/util' -import { randomStr } from '../../../crypto/src' +} from '../../../src/data-plane/server/subscription/util' +import { randomStr } from '../../../../crypto/src' describe('subscription utils', () => { describe('ConsecutiveList', () => { From 62603e09d0feefa8c842f3f9df1bf4876a4a8da9 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Fri, 29 Dec 2023 10:36:43 -0500 Subject: [PATCH 15/17] cover label hydration through timeline test --- .../views/__snapshots__/timeline.test.ts.snap | 257 +++++++++++++++--- packages/bsky/tests/views/timeline.test.ts | 37 ++- 2 files changed, 245 insertions(+), 49 deletions(-) diff --git a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap index e0c88a23363..d7af4a6a91d 100644 --- a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap @@ -273,7 +273,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -839,7 +848,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(6)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(8)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { 
"$type": "app.bsky.feed.post", @@ -1018,7 +1036,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(15)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(15)", @@ -1042,7 +1060,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(14)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 2, @@ -1119,7 +1137,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(15)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 0, @@ -1301,7 +1319,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1516,7 +1551,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(11)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(11)", @@ -1541,7 +1576,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(8)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(8)", @@ -1720,7 +1755,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1959,7 +2011,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + 
"labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -2159,7 +2228,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(8)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(8)", @@ -2239,7 +2308,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(11)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(13)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -2406,7 +2484,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(11)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(11)", @@ -2431,7 +2509,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(8)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(8)", @@ -2597,7 +2675,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(11)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(11)", @@ -2621,7 +2699,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(8)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 2, @@ -2698,7 +2776,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(11)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 0, @@ -2853,7 +2931,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(4)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(4)", @@ -2878,7 +2956,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": 
"record(2)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(2)", @@ -3056,7 +3134,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label", + }, + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -3304,7 +3399,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label", + }, + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -3513,7 +3625,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(2)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(2)", @@ -3593,7 +3705,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(11)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(13)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -3746,7 +3867,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(4)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(4)", @@ -3770,7 +3891,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(2)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 2, @@ -3845,7 +3966,7 @@ 
Array [ "neg": false, "src": "did:example:labeler", "uri": "record(4)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 0, @@ -4001,7 +4122,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(2)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(2)", @@ -4026,7 +4147,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(1)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(1)", @@ -4206,7 +4327,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label", + }, + Object { + "cid": "cids(9)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(10)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -4498,7 +4636,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(1)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(1)", @@ -4578,7 +4716,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(11)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(13)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -4703,7 +4850,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(2)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(2)", @@ -4727,7 +4874,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(1)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 2, @@ -4944,7 +5091,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": 
"1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -5120,7 +5284,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -5354,7 +5535,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(12)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(12)", @@ -5379,7 +5560,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(11)", - "val": "kind", + "val": "test-label-3", }, ], "uri": "record(11)", @@ -5510,7 +5691,7 @@ Array [ "neg": false, "src": "did:example:labeler", "uri": "record(12)", - "val": "kind", + "val": "test-label-3", }, ], "likeCount": 0, diff --git a/packages/bsky/tests/views/timeline.test.ts b/packages/bsky/tests/views/timeline.test.ts index 93d1190b0eb..ba73b9adc7b 100644 --- a/packages/bsky/tests/views/timeline.test.ts +++ b/packages/bsky/tests/views/timeline.test.ts @@ -31,18 +31,33 @@ describe('timeline views', () => { bob = sc.dids.bob carol = sc.dids.carol dan = sc.dids.dan - // Label posts as "kind" to check labels on embed views - const labelPostA = sc.posts[bob][0].ref - const labelPostB = sc.posts[carol][0].ref - await createLabel(network.bsky.db.getPrimary(), { - val: 'kind', - uri: labelPostA.uriStr, - cid: labelPostA.cidStr, + // add 
some labels to test label hydration + const db = network.bsky.db.getPrimary() + await createLabel(db, { + val: 'test-label', + uri: sc.posts[alice][2].ref.uriStr, + cid: sc.posts[alice][2].ref.cidStr, }) - await createLabel(network.bsky.db.getPrimary(), { - val: 'kind', - uri: labelPostB.uriStr, - cid: labelPostB.cidStr, + await createLabel(db, { + val: 'test-label', + uri: sc.replies[bob][0].ref.uriStr, + cid: sc.replies[bob][0].ref.cidStr, + }) + await createLabel(db, { + val: 'test-label-2', + uri: sc.replies[bob][0].ref.uriStr, + cid: sc.replies[bob][0].ref.cidStr, + }) + // covers label hydration on embeds + await createLabel(db, { + val: 'test-label-3', + uri: sc.posts[bob][0].ref.uriStr, + cid: sc.posts[bob][0].ref.cidStr, + }) + await createLabel(db, { + val: 'test-label-3', + uri: sc.posts[carol][0].ref.uriStr, + cid: sc.posts[carol][0].ref.cidStr, }) }) From b40e8c607f53eeaa2a8127988d6141c8dfa9ef30 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Fri, 29 Dec 2023 10:47:48 -0500 Subject: [PATCH 16/17] bring back labels in appview tests --- .../__snapshots__/indexing.test.ts.snap | 30 +++++- packages/bsky/tests/seeds/basic.ts | 44 ++++++++- .../__snapshots__/author-feed.test.ts.snap | 98 +++++++++++++++++-- .../__snapshots__/block-lists.test.ts.snap | 11 ++- .../views/__snapshots__/blocks.test.ts.snap | 30 +++++- .../__snapshots__/list-feed.test.ts.snap | 49 +++++++++- .../__snapshots__/mute-lists.test.ts.snap | 19 +++- .../views/__snapshots__/mutes.test.ts.snap | 19 +++- .../__snapshots__/notifications.test.ts.snap | 49 +++++++++- .../views/__snapshots__/thread.test.ts.snap | 95 +++++++++++++++++- packages/bsky/tests/views/timeline.test.ts | 18 +--- packages/dev-env/src/network-no-appview.ts | 2 +- packages/dev-env/src/seed-client.ts | 6 +- 13 files changed, 424 insertions(+), 46 deletions(-) diff --git a/packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap b/packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap index 
881dbb59b50..88c02c6e3e0 100644 --- a/packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap +++ b/packages/bsky/tests/data-plane/__snapshots__/indexing.test.ts.snap @@ -108,7 +108,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -295,7 +312,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(6)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(6)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/seeds/basic.ts b/packages/bsky/tests/seeds/basic.ts index b935afd3d6f..87311b78ccc 100644 --- a/packages/bsky/tests/seeds/basic.ts +++ b/packages/bsky/tests/seeds/basic.ts @@ -1,8 +1,12 @@ -import { SeedClient } from '@atproto/dev-env' +import { SeedClient, TestNetwork, TestNetworkNoAppView } from '@atproto/dev-env' import { ids } from '../../src/lexicon/lexicons' import usersSeed from './users' +import { PrimaryDatabase } from '../../src' -export default async (sc: SeedClient, users = true) => { +export default async ( + sc: SeedClient, + users = true, +) => { if (users) await usersSeed(sc) const alice = sc.dids.alice @@ -130,6 +134,25 @@ export default async (sc: SeedClient, users = true) => { await sc.repost(dan, sc.posts[alice][1].ref) await sc.repost(dan, alicesReplyToBob.ref) + if (sc.network instanceof TestNetwork) { + const db = sc.network.bsky.db.getPrimary() + await createLabel(db, { + 
val: 'test-label', + uri: sc.posts[alice][2].ref.uriStr, + cid: sc.posts[alice][2].ref.cidStr, + }) + await createLabel(db, { + val: 'test-label', + uri: sc.replies[bob][0].ref.uriStr, + cid: sc.replies[bob][0].ref.cidStr, + }) + await createLabel(db, { + val: 'test-label-2', + uri: sc.replies[bob][0].ref.uriStr, + cid: sc.replies[bob][0].ref.cidStr, + }) + } + return sc } @@ -145,3 +168,20 @@ export const replies = { bob: ['hear that label_me label_me_2'], carol: ['of course'], } + +const createLabel = async ( + db: PrimaryDatabase, + opts: { uri: string; cid: string; val: string }, +) => { + await db.db + .insertInto('label') + .values({ + uri: opts.uri, + cid: opts.cid, + val: opts.val, + cts: new Date().toISOString(), + neg: false, + src: 'did:example:labeler', + }) + .execute() +} diff --git a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap index 3f775a98661..f7a61d82547 100644 --- a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap @@ -84,7 +84,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -310,7 +327,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(6)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(6)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", 
@@ -467,7 +493,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(0)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(0)", + "val": "test-label", + }, + Object { + "cid": "cids(0)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(0)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1188,7 +1231,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1609,7 +1669,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1838,7 +1915,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(6)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(8)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git 
a/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap b/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap index 93a646c8a16..e5e068353ea 100644 --- a/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/block-lists.test.ts.snap @@ -98,7 +98,16 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(0)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(0)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap index f47f3a10910..079cdbe60a5 100644 --- a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap @@ -98,7 +98,16 @@ Object { }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(0)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(0)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", @@ -287,7 +296,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(6)", + "val": "test-label", + }, + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(6)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap index 
56fd2bf4899..d6712c89c56 100644 --- a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap @@ -85,7 +85,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -199,7 +216,24 @@ Array [ ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -477,7 +511,16 @@ Array [ }, }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(6)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(8)", + "val": "test-label", + }, + ], "likeCount": 2, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap index 31eb91a9d29..a7b34c07dc0 100644 --- a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap @@ -237,7 +237,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + 
"labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(9)", + "val": "test-label", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(9)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap index 58849599bfe..0e1c14c2696 100644 --- a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap @@ -212,7 +212,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", diff --git a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap index 2f18110bc14..bce3d4e5139 100644 --- a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap @@ -245,7 +245,24 @@ Array [ "cid": "cids(12)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(12)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(14)", + "val": "test-label", + }, + Object { + "cid": "cids(12)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": 
"did:example:labeler", + "uri": "record(14)", + "val": "test-label-2", + }, + ], "reason": "reply", "reasonSubject": "record(4)", "record": Object { @@ -672,7 +689,24 @@ Array [ "cid": "cids(15)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(15)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(17)", + "val": "test-label", + }, + Object { + "cid": "cids(15)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(17)", + "val": "test-label-2", + }, + ], "reason": "reply", "reasonSubject": "record(4)", "record": Object { @@ -882,7 +916,16 @@ Array [ "cid": "cids(5)", "indexedAt": "1970-01-01T00:00:00.000Z", "isRead": false, - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(4)", + "val": "test-label", + }, + ], "reason": "quote", "reasonSubject": "record(1)", "record": Object { diff --git a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap index cb18eeafbe4..85c3493e092 100644 --- a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap @@ -79,7 +79,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -295,7 +312,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - 
"labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(7)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(7)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -516,7 +550,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(7)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(7)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1051,7 +1102,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", @@ -1234,7 +1302,24 @@ Object { ], }, "indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label", + }, + Object { + "cid": "cids(3)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(5)", + "val": "test-label-2", + }, + ], "likeCount": 0, "record": Object { "$type": 
"app.bsky.feed.post", diff --git a/packages/bsky/tests/views/timeline.test.ts b/packages/bsky/tests/views/timeline.test.ts index ba73b9adc7b..70a3cfad21d 100644 --- a/packages/bsky/tests/views/timeline.test.ts +++ b/packages/bsky/tests/views/timeline.test.ts @@ -31,24 +31,8 @@ describe('timeline views', () => { bob = sc.dids.bob carol = sc.dids.carol dan = sc.dids.dan - // add some labels to test label hydration - const db = network.bsky.db.getPrimary() - await createLabel(db, { - val: 'test-label', - uri: sc.posts[alice][2].ref.uriStr, - cid: sc.posts[alice][2].ref.cidStr, - }) - await createLabel(db, { - val: 'test-label', - uri: sc.replies[bob][0].ref.uriStr, - cid: sc.replies[bob][0].ref.cidStr, - }) - await createLabel(db, { - val: 'test-label-2', - uri: sc.replies[bob][0].ref.uriStr, - cid: sc.replies[bob][0].ref.cidStr, - }) // covers label hydration on embeds + const db = network.bsky.db.getPrimary() await createLabel(db, { val: 'test-label-3', uri: sc.posts[bob][0].ref.uriStr, diff --git a/packages/dev-env/src/network-no-appview.ts b/packages/dev-env/src/network-no-appview.ts index 30b978b5b79..bf7df078262 100644 --- a/packages/dev-env/src/network-no-appview.ts +++ b/packages/dev-env/src/network-no-appview.ts @@ -32,7 +32,7 @@ export class TestNetworkNoAppView { return fg } - getSeedClient(): SeedClient { + getSeedClient(): SeedClient { const agent = this.pds.getClient() return new SeedClient(this, agent) } diff --git a/packages/dev-env/src/seed-client.ts b/packages/dev-env/src/seed-client.ts index 71dfebd53c0..e1ac154a519 100644 --- a/packages/dev-env/src/seed-client.ts +++ b/packages/dev-env/src/seed-client.ts @@ -46,7 +46,9 @@ export class RecordRef { } } -export class SeedClient { +export class SeedClient< + Network extends TestNetworkNoAppView = TestNetworkNoAppView, +> { accounts: Record< string, { @@ -82,7 +84,7 @@ export class SeedClient { > dids: Record - constructor(public network: TestNetworkNoAppView, public agent: AtpAgent) { + 
constructor(public network: Network, public agent: AtpAgent) { this.accounts = {} this.profiles = {} this.follows = {} From 9b2153fcd5c4c36682ccf16f38d81c898eccd1a0 Mon Sep 17 00:00:00 2001 From: Devin Ivy Date: Fri, 29 Dec 2023 11:22:34 -0500 Subject: [PATCH 17/17] remove unused db primary/replica/coordinator from bsky dataplane --- .../bsky/src/data-plane/server/background.ts | 6 +- .../src/data-plane/server/db/coordinator.ts | 107 ---------- packages/bsky/src/data-plane/server/db/db.ts | 117 ++++++++++- .../bsky/src/data-plane/server/db/index.ts | 2 - .../bsky/src/data-plane/server/db/primary.ts | 184 ------------------ .../bsky/src/data-plane/server/did-cache.ts | 4 +- .../src/data-plane/server/indexing/index.ts | 6 +- .../server/indexing/plugins/block.ts | 4 +- .../server/indexing/plugins/feed-generator.ts | 4 +- .../server/indexing/plugins/follow.ts | 4 +- .../server/indexing/plugins/like.ts | 4 +- .../server/indexing/plugins/list-block.ts | 4 +- .../server/indexing/plugins/list-item.ts | 4 +- .../server/indexing/plugins/list.ts | 4 +- .../server/indexing/plugins/post.ts | 4 +- .../server/indexing/plugins/profile.ts | 4 +- .../server/indexing/plugins/repost.ts | 4 +- .../server/indexing/plugins/thread-gate.ts | 4 +- .../data-plane/server/indexing/processor.ts | 6 +- .../data-plane/server/subscription/index.ts | 4 +- packages/bsky/src/index.ts | 6 +- packages/bsky/tests/data-plane/db.test.ts | 6 +- .../bsky/tests/data-plane/did-cache.test.ts | 4 +- .../data-plane/duplicate-records.test.ts | 6 +- .../data-plane/handle-invalidation.test.ts | 5 +- .../bsky/tests/data-plane/indexing.test.ts | 2 +- .../data-plane/subscription/repo.test.ts | 6 +- packages/bsky/tests/feed-generation.test.ts | 5 +- packages/bsky/tests/seeds/basic.ts | 7 +- packages/bsky/tests/server.test.ts | 3 - .../bsky/tests/views/actor-search.test.ts | 2 +- .../tests/views/suggested-follows.test.ts | 5 +- packages/bsky/tests/views/suggestions.test.ts | 5 +- 
packages/bsky/tests/views/timeline.test.ts | 6 +- packages/dev-env/src/bsky.ts | 24 +-- packages/dev-env/src/mock/index.ts | 8 +- packages/pds/tests/proxied/views.test.ts | 5 +- packages/pds/tests/seeds/basic.ts | 5 +- 38 files changed, 192 insertions(+), 398 deletions(-) delete mode 100644 packages/bsky/src/data-plane/server/db/coordinator.ts delete mode 100644 packages/bsky/src/data-plane/server/db/primary.ts diff --git a/packages/bsky/src/data-plane/server/background.ts b/packages/bsky/src/data-plane/server/background.ts index 0ce785b21ce..59d8ccf0ddf 100644 --- a/packages/bsky/src/data-plane/server/background.ts +++ b/packages/bsky/src/data-plane/server/background.ts @@ -1,5 +1,5 @@ import PQueue from 'p-queue' -import { PrimaryDatabase } from './db' +import { Database } from './db' import { dbLogger } from '../../logger' // A simple queue for in-process, out-of-band/backgrounded work @@ -7,7 +7,7 @@ import { dbLogger } from '../../logger' export class BackgroundQueue { queue = new PQueue() destroyed = false - constructor(public db: PrimaryDatabase) {} + constructor(public db: Database) {} add(task: Task) { if (this.destroyed) { @@ -32,4 +32,4 @@ export class BackgroundQueue { } } -type Task = (db: PrimaryDatabase) => Promise +type Task = (db: Database) => Promise diff --git a/packages/bsky/src/data-plane/server/db/coordinator.ts b/packages/bsky/src/data-plane/server/db/coordinator.ts deleted file mode 100644 index 25ef305ed5a..00000000000 --- a/packages/bsky/src/data-plane/server/db/coordinator.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { Migrator } from 'kysely' -import PrimaryDatabase from './primary' -import Database from './db' -import { PgOptions } from './types' -import { dbLogger } from '../../../logger' - -type ReplicaTag = 'timeline' | 'feed' | 'search' | 'thread' | '*' -type ReplicaOptions = PgOptions & { tags?: ReplicaTag[] } - -type CoordinatorOptions = { - schema?: string - primary: PgOptions - replicas?: ReplicaOptions[] -} - -type ReplicaGroup = 
{ - dbs: Database[] - roundRobinIdx: number -} - -export class DatabaseCoordinator { - migrator: Migrator - destroyed = false - - private primary: PrimaryDatabase - private allReplicas: Database[] - private tagged: Record - private untagged: ReplicaGroup - private tagWarns = new Set() - - constructor(public opts: CoordinatorOptions) { - this.primary = new PrimaryDatabase({ - schema: opts.schema, - ...opts.primary, - }) - this.allReplicas = [] - this.tagged = {} - this.untagged = { - dbs: [], - roundRobinIdx: 0, - } - for (const cfg of opts.replicas ?? []) { - const db = new Database({ - schema: opts.schema, - ...cfg, - }) - this.allReplicas.push(db) - // setup different groups of replicas based on tag, each round-robins separately. - if (cfg.tags?.length) { - for (const tag of cfg.tags) { - if (tag === '*') { - this.untagged.dbs.push(db) - } else { - this.tagged[tag] ??= { - dbs: [], - roundRobinIdx: 0, - } - this.tagged[tag].dbs.push(db) - } - } - } else { - this.untagged.dbs.push(db) - } - } - // guarantee there is always a replica around to service any query, falling back to primary. 
- if (!this.untagged.dbs.length) { - if (this.allReplicas.length) { - this.untagged.dbs = [...this.allReplicas] - } else { - this.untagged.dbs = [this.primary] - } - } - } - - getPrimary(): PrimaryDatabase { - return this.primary - } - - getReplicas(): Database[] { - return this.allReplicas - } - - getReplica(tag?: ReplicaTag): Database { - if (tag && this.tagged[tag]) { - return nextDb(this.tagged[tag]) - } - if (tag && !this.tagWarns.has(tag)) { - this.tagWarns.add(tag) - dbLogger.warn({ tag }, 'no replica for tag, falling back to any replica') - } - return nextDb(this.untagged) - } - - async close(): Promise { - await Promise.all([ - this.primary.close(), - ...this.allReplicas.map((db) => db.close()), - ]) - } -} - -// @NOTE mutates group incrementing roundRobinIdx -const nextDb = (group: ReplicaGroup) => { - const db = group.dbs[group.roundRobinIdx] - group.roundRobinIdx = (group.roundRobinIdx + 1) % group.dbs.length - return db -} diff --git a/packages/bsky/src/data-plane/server/db/db.ts b/packages/bsky/src/data-plane/server/db/db.ts index 35fa6d0857a..6411938d69d 100644 --- a/packages/bsky/src/data-plane/server/db/db.ts +++ b/packages/bsky/src/data-plane/server/db/db.ts @@ -1,24 +1,40 @@ import assert from 'assert' -import { Kysely, PostgresDialect } from 'kysely' +import EventEmitter from 'events' +import { + Kysely, + KyselyPlugin, + Migrator, + PluginTransformQueryArgs, + PluginTransformResultArgs, + PostgresDialect, + QueryResult, + RootOperationNode, + UnknownRow, +} from 'kysely' +import TypedEmitter from 'typed-emitter' import { Pool as PgPool, types as pgTypes } from 'pg' +import * as migrations from './migrations' import DatabaseSchema, { DatabaseSchemaType } from './database-schema' import { PgOptions } from './types' import { dbLogger } from '../../../logger' +import { CtxMigrationProvider } from './migrations/provider' export class Database { pool: PgPool db: DatabaseSchema + migrator: Migrator + txEvt = new EventEmitter() as TxnEmitter destroyed 
= false - isPrimary = false constructor( public opts: PgOptions, - instances?: { db: DatabaseSchema; pool: PgPool }, + instances?: { db: DatabaseSchema; pool: PgPool; migrator: Migrator }, ) { // if instances are provided, use those if (instances) { this.db = instances.db this.pool = instances.pool + this.migrator = instances.migrator return } @@ -56,12 +72,42 @@ export class Database { this.db = new Kysely({ dialect: new PostgresDialect({ pool }), }) + this.migrator = new Migrator({ + db: this.db, + migrationTableSchema: opts.schema, + provider: new CtxMigrationProvider(migrations, 'pg'), + }) } get schema(): string | undefined { return this.opts.schema } + async transaction(fn: (db: Database) => Promise): Promise { + const leakyTxPlugin = new LeakyTxPlugin() + const { dbTxn, txRes } = await this.db + .withPlugin(leakyTxPlugin) + .transaction() + .execute(async (txn) => { + const dbTxn = new Database(this.opts, { + db: txn, + pool: this.pool, + migrator: this.migrator, + }) + const txRes = await fn(dbTxn) + .catch(async (err) => { + leakyTxPlugin.endTx() + // ensure that all in-flight queries are flushed & the connection is open + await dbTxn.db.getExecutor().provideConnection(noopAsync) + throw err + }) + .finally(() => leakyTxPlugin.endTx()) + return { dbTxn, txRes } + }) + dbTxn?.txEvt.emit('commit') + return txRes + } + get isTransaction() { return this.db.isTransaction } @@ -74,8 +120,37 @@ export class Database { assert(!this.isTransaction, 'Cannot be in a transaction') } - asPrimary(): Database { - throw new Error('Primary db required') + onCommit(fn: () => void) { + this.assertTransaction() + this.txEvt.once('commit', fn) + } + + async migrateToOrThrow(migration: string) { + if (this.schema) { + await this.db.schema.createSchema(this.schema).ifNotExists().execute() + } + const { error, results } = await this.migrator.migrateTo(migration) + if (error) { + throw error + } + if (!results) { + throw new Error('An unknown failure occurred while migrating') + } 
+ return results + } + + async migrateToLatestOrThrow() { + if (this.schema) { + await this.db.schema.createSchema(this.schema).ifNotExists().execute() + } + const { error, results } = await this.migrator.migrateToLatest() + if (error) { + throw error + } + if (!results) { + throw new Error('An unknown failure occurred while migrating') + } + return results } async close(): Promise { @@ -89,3 +164,35 @@ export default Database const onPoolError = (err: Error) => dbLogger.error({ err }, 'db pool error') const onClientError = (err: Error) => dbLogger.error({ err }, 'db client error') + +// utils +// ------- + +class LeakyTxPlugin implements KyselyPlugin { + private txOver: boolean + + endTx() { + this.txOver = true + } + + transformQuery(args: PluginTransformQueryArgs): RootOperationNode { + if (this.txOver) { + throw new Error('tx already failed') + } + return args.node + } + + async transformResult( + args: PluginTransformResultArgs, + ): Promise> { + return args.result + } +} + +type TxnEmitter = TypedEmitter + +type TxnEvents = { + commit: () => void +} + +const noopAsync = async () => {} diff --git a/packages/bsky/src/data-plane/server/db/index.ts b/packages/bsky/src/data-plane/server/db/index.ts index 1c5886fb10e..1beb455f5e3 100644 --- a/packages/bsky/src/data-plane/server/db/index.ts +++ b/packages/bsky/src/data-plane/server/db/index.ts @@ -1,3 +1 @@ -export * from './primary' export * from './db' -export * from './coordinator' diff --git a/packages/bsky/src/data-plane/server/db/primary.ts b/packages/bsky/src/data-plane/server/db/primary.ts deleted file mode 100644 index 61b5765967a..00000000000 --- a/packages/bsky/src/data-plane/server/db/primary.ts +++ /dev/null @@ -1,184 +0,0 @@ -import EventEmitter from 'events' -import { - Migrator, - KyselyPlugin, - PluginTransformQueryArgs, - PluginTransformResultArgs, - RootOperationNode, - QueryResult, - UnknownRow, - sql, -} from 'kysely' -import { Pool as PgPool } from 'pg' -import TypedEmitter from 'typed-emitter' 
-import { wait } from '@atproto/common' -import DatabaseSchema from './database-schema' -import * as migrations from './migrations' -import { CtxMigrationProvider } from './migrations/provider' -import { dbLogger as log } from '../../../logger' -import { PgOptions } from './types' -import { Database } from './db' - -export class PrimaryDatabase extends Database { - migrator: Migrator - txEvt = new EventEmitter() as TxnEmitter - destroyed = false - isPrimary = true - - constructor( - public opts: PgOptions, - instances?: { db: DatabaseSchema; pool: PgPool }, - ) { - super(opts, instances) - this.migrator = new Migrator({ - db: this.db, - migrationTableSchema: opts.schema, - provider: new CtxMigrationProvider(migrations, 'pg'), - }) - } - - static is(db: Database): db is PrimaryDatabase { - return db.isPrimary - } - - asPrimary(): PrimaryDatabase { - return this - } - - async transaction(fn: (db: PrimaryDatabase) => Promise): Promise { - const leakyTxPlugin = new LeakyTxPlugin() - const { dbTxn, txRes } = await this.db - .withPlugin(leakyTxPlugin) - .transaction() - .execute(async (txn) => { - const dbTxn = new PrimaryDatabase(this.opts, { - db: txn, - pool: this.pool, - }) - const txRes = await fn(dbTxn) - .catch(async (err) => { - leakyTxPlugin.endTx() - // ensure that all in-flight queries are flushed & the connection is open - await dbTxn.db.getExecutor().provideConnection(noopAsync) - throw err - }) - .finally(() => leakyTxPlugin.endTx()) - return { dbTxn, txRes } - }) - dbTxn?.txEvt.emit('commit') - return txRes - } - - onCommit(fn: () => void) { - this.assertTransaction() - this.txEvt.once('commit', fn) - } - - async close(): Promise { - if (this.destroyed) return - await this.db.destroy() - this.destroyed = true - } - - async migrateToOrThrow(migration: string) { - if (this.schema) { - await this.db.schema.createSchema(this.schema).ifNotExists().execute() - } - const { error, results } = await this.migrator.migrateTo(migration) - if (error) { - throw error - 
} - if (!results) { - throw new Error('An unknown failure occurred while migrating') - } - return results - } - - async migrateToLatestOrThrow() { - if (this.schema) { - await this.db.schema.createSchema(this.schema).ifNotExists().execute() - } - const { error, results } = await this.migrator.migrateToLatest() - if (error) { - throw error - } - if (!results) { - throw new Error('An unknown failure occurred while migrating') - } - return results - } - - async maintainMaterializedViews(opts: { - views: string[] - intervalSec: number - signal: AbortSignal - }) { - const { views, intervalSec, signal } = opts - while (!signal.aborted) { - // super basic synchronization by agreeing when the intervals land relative to unix timestamp - const now = Date.now() - const intervalMs = 1000 * intervalSec - const nextIteration = Math.ceil(now / intervalMs) - const nextInMs = nextIteration * intervalMs - now - await wait(nextInMs) - if (signal.aborted) break - await Promise.all( - views.map(async (view) => { - try { - await this.refreshMaterializedView(view) - log.info( - { view, time: new Date().toISOString() }, - 'materialized view refreshed', - ) - } catch (err) { - log.error( - { view, err, time: new Date().toISOString() }, - 'materialized view refresh failed', - ) - } - }), - ) - } - } - - async refreshMaterializedView(view: string) { - const { ref } = this.db.dynamic - await sql`refresh materialized view concurrently ${ref(view)}`.execute( - this.db, - ) - } -} - -export default PrimaryDatabase - -// utils -// ------- - -class LeakyTxPlugin implements KyselyPlugin { - private txOver: boolean - - endTx() { - this.txOver = true - } - - transformQuery(args: PluginTransformQueryArgs): RootOperationNode { - if (this.txOver) { - throw new Error('tx already failed') - } - return args.node - } - - async transformResult( - args: PluginTransformResultArgs, - ): Promise> { - return args.result - } -} - -type TxnEmitter = TypedEmitter - -type TxnEvents = { - commit: () => void -} - 
-const noopAsync = async () => {} diff --git a/packages/bsky/src/data-plane/server/did-cache.ts b/packages/bsky/src/data-plane/server/did-cache.ts index 3fed82df5ed..2ffe3b5aa69 100644 --- a/packages/bsky/src/data-plane/server/did-cache.ts +++ b/packages/bsky/src/data-plane/server/did-cache.ts @@ -1,6 +1,6 @@ import PQueue from 'p-queue' import { CacheResult, DidCache, DidDocument } from '@atproto/identity' -import { PrimaryDatabase } from './db' +import { Database } from './db' import { excluded } from './db/util' import { dbLogger } from '../../logger' @@ -10,7 +10,7 @@ export class DidSqlCache implements DidCache { constructor( // @TODO perhaps could use both primary and non-primary. not high enough // throughput to matter right now. also may just move this over to redis before long! - public db: PrimaryDatabase, + public db: Database, public staleTTL: number, public maxTTL: number, ) { diff --git a/packages/bsky/src/data-plane/server/indexing/index.ts b/packages/bsky/src/data-plane/server/indexing/index.ts index 69fc73dc0e6..68f5ff8b721 100644 --- a/packages/bsky/src/data-plane/server/indexing/index.ts +++ b/packages/bsky/src/data-plane/server/indexing/index.ts @@ -13,7 +13,7 @@ import { AtUri } from '@atproto/syntax' import { IdResolver, getPds } from '@atproto/identity' import { DAY, HOUR } from '@atproto/common' import { ValidationError } from '@atproto/lexicon' -import { PrimaryDatabase } from '../db' +import { Database } from '../db' import { Actor } from '../db/tables/actor' import * as Post from './plugins/post' import * as Threadgate from './plugins/thread-gate' @@ -47,7 +47,7 @@ export class IndexingService { } constructor( - public db: PrimaryDatabase, + public db: Database, public idResolver: IdResolver, public background: BackgroundQueue, ) { @@ -66,7 +66,7 @@ export class IndexingService { } } - transact(txn: PrimaryDatabase) { + transact(txn: Database) { txn.assertTransaction() return new IndexingService(txn, this.idResolver, this.background) } 
diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts index feb94b3256f..ec4956a04f5 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/block.ts @@ -3,7 +3,7 @@ import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' import * as Block from '../../../../lexicon/types/app/bsky/graph/block' import * as lex from '../../../../lexicon/lexicons' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { BackgroundQueue } from '../../background' @@ -71,7 +71,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts index 7af296fb26f..f3b82c75567 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/feed-generator.ts @@ -3,7 +3,7 @@ import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' import * as FeedGenerator from '../../../../lexicon/types/app/bsky/feed/generator' import * as lex from '../../../../lexicon/lexicons' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { BackgroundQueue } from '../../background' @@ -70,7 +70,7 @@ export type PluginType = RecordProcessor< > export const 
makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts index f8f10069191..6f238755761 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/follow.ts @@ -4,7 +4,7 @@ import { CID } from 'multiformats/cid' import * as Follow from '../../../../lexicon/types/app/bsky/graph/follow' import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { countAll, excluded } from '../../db/util' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import { BackgroundQueue } from '../../background' @@ -118,7 +118,7 @@ const updateAggregates = async (db: DatabaseSchema, follow: IndexedFollow) => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/like.ts b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts index 849e0ed5bbb..98e9fc722f8 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/like.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/like.ts @@ -5,7 +5,7 @@ import * as Like from '../../../../lexicon/types/app/bsky/feed/like' import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' import { countAll, excluded } from '../../db/util' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import { BackgroundQueue } from '../../background' 
@@ -108,7 +108,7 @@ const updateAggregates = async (db: DatabaseSchema, like: IndexedLike) => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts index da6d1481553..09eabcdb9f4 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-block.ts @@ -3,7 +3,7 @@ import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import { CID } from 'multiformats/cid' import * as ListBlock from '../../../../lexicon/types/app/bsky/graph/listblock' import * as lex from '../../../../lexicon/lexicons' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { BackgroundQueue } from '../../background' @@ -71,7 +71,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts index 37b987f8f18..f2a43cff485 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list-item.ts @@ -5,7 +5,7 @@ import { CID } from 'multiformats/cid' import * as ListItem from '../../../../lexicon/types/app/bsky/graph/listitem' import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../db' +import { 
Database } from '../../db' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import { BackgroundQueue } from '../../background' @@ -79,7 +79,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/list.ts b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts index 52dc67e4e7d..f6deaf0a68e 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/list.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/list.ts @@ -5,7 +5,7 @@ import * as List from '../../../../lexicon/types/app/bsky/graph/list' import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyGraphList @@ -67,7 +67,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/post.ts b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts index 1ed538a55b8..cc4121ab667 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/post.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/post.ts @@ -19,7 +19,7 @@ import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' import { Notification } from '../../db/tables/notification' -import { PrimaryDatabase } from 
'../../db' +import { Database } from '../../db' import { countAll, excluded } from '../../db/util' import { getAncestorsAndSelfQb, @@ -395,7 +395,7 @@ const updateAggregates = async (db: DatabaseSchema, postIdx: IndexedPost) => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts index 1a4f3804f55..18c9b54bbb9 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/profile.ts @@ -4,7 +4,7 @@ import * as Profile from '../../../../lexicon/types/app/bsky/actor/profile' import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import RecordProcessor from '../processor' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { BackgroundQueue } from '../../background' const lexId = lex.ids.AppBskyActorProfile @@ -63,7 +63,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts index 8e6c794fcec..ec2e7754fb0 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/repost.ts @@ -4,7 +4,7 @@ import { AtUri, normalizeDatetimeAlways } from '@atproto/syntax' import * as Repost from '../../../../lexicon/types/app/bsky/feed/repost' import * as lex from '../../../../lexicon/lexicons' import RecordProcessor from 
'../processor' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import { countAll, excluded } from '../../db/util' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' import { BackgroundQueue } from '../../background' @@ -133,7 +133,7 @@ const updateAggregates = async (db: DatabaseSchema, repost: IndexedRepost) => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts index c306602f973..0402fe8289f 100644 --- a/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts +++ b/packages/bsky/src/data-plane/server/indexing/plugins/thread-gate.ts @@ -4,7 +4,7 @@ import { CID } from 'multiformats/cid' import * as Threadgate from '../../../../lexicon/types/app/bsky/feed/threadgate' import * as lex from '../../../../lexicon/lexicons' import { DatabaseSchema, DatabaseSchemaType } from '../../db/database-schema' -import { PrimaryDatabase } from '../../db' +import { Database } from '../../db' import RecordProcessor from '../processor' import { BackgroundQueue } from '../../background' @@ -76,7 +76,7 @@ const notifsForDelete = () => { export type PluginType = RecordProcessor export const makePlugin = ( - db: PrimaryDatabase, + db: Database, background: BackgroundQueue, ): PluginType => { return new RecordProcessor(db, background, { diff --git a/packages/bsky/src/data-plane/server/indexing/processor.ts b/packages/bsky/src/data-plane/server/indexing/processor.ts index 341dd1ccb0a..77a8fbdf09f 100644 --- a/packages/bsky/src/data-plane/server/indexing/processor.ts +++ b/packages/bsky/src/data-plane/server/indexing/processor.ts @@ -4,7 +4,7 @@ import { AtUri } from '@atproto/syntax' import { chunkArray } from 
'@atproto/common' import { jsonStringToLex, stringifyLex } from '@atproto/lexicon' import { lexicons } from '../../../lexicon/lexicons' -import { PrimaryDatabase } from '../db' +import { Database } from '../db' import DatabaseSchema from '../db/database-schema' import { Notification } from '../db/tables/notification' import { BackgroundQueue } from '../background' @@ -40,7 +40,7 @@ export class RecordProcessor { collection: string db: DatabaseSchema constructor( - private appDb: PrimaryDatabase, + private appDb: Database, private background: BackgroundQueue, private params: RecordProcessorParams, ) { @@ -225,7 +225,7 @@ export class RecordProcessor { async handleNotifs(op: { deleted?: S; inserted?: S }) { let notifs: Notif[] = [] - const runOnCommit: ((db: PrimaryDatabase) => Promise)[] = [] + const runOnCommit: ((db: Database) => Promise)[] = [] if (op.deleted) { const forDelete = this.params.notifsForDelete( op.deleted, diff --git a/packages/bsky/src/data-plane/server/subscription/index.ts b/packages/bsky/src/data-plane/server/subscription/index.ts index 5054441b6b9..dddc07aa39c 100644 --- a/packages/bsky/src/data-plane/server/subscription/index.ts +++ b/packages/bsky/src/data-plane/server/subscription/index.ts @@ -17,7 +17,7 @@ import { OutputSchema as Message } from '../../../lexicon/types/com/atproto/sync import * as message from '../../../lexicon/types/com/atproto/sync/subscribeRepos' import { subLogger as log } from '../../../logger' import { IndexingService } from '../indexing' -import { PrimaryDatabase } from '../db' +import { Database } from '../db' import { ConsecutiveItem, ConsecutiveList, @@ -40,7 +40,7 @@ export class RepoSubscription { constructor( private opts: { service: string - db: PrimaryDatabase + db: Database idResolver: IdResolver background: BackgroundQueue }, diff --git a/packages/bsky/src/index.ts b/packages/bsky/src/index.ts index df8476f7f36..930e327d150 100644 --- a/packages/bsky/src/index.ts +++ b/packages/bsky/src/index.ts @@ -24,11 
+24,7 @@ export * from './data-plane' export type { ServerConfigValues } from './config' export type { MountedAlgos } from './api/feed-gen/types' export { ServerConfig } from './config' -export { - Database, - PrimaryDatabase, - DatabaseCoordinator, -} from './data-plane/server/db' +export { Database } from './data-plane/server/db' export { Redis } from './redis' export { AppContext } from './context' export { makeAlgos } from './api/feed-gen' diff --git a/packages/bsky/tests/data-plane/db.test.ts b/packages/bsky/tests/data-plane/db.test.ts index 1b0787828ae..379906ab847 100644 --- a/packages/bsky/tests/data-plane/db.test.ts +++ b/packages/bsky/tests/data-plane/db.test.ts @@ -1,17 +1,17 @@ import { sql } from 'kysely' import { wait } from '@atproto/common' import { TestNetwork } from '@atproto/dev-env' -import { Database, PrimaryDatabase } from '../../src' +import { Database } from '../../src' describe('db', () => { let network: TestNetwork - let db: PrimaryDatabase + let db: Database beforeAll(async () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_db', }) - db = network.bsky.db.getPrimary() + db = network.bsky.db }) afterAll(async () => { diff --git a/packages/bsky/tests/data-plane/did-cache.test.ts b/packages/bsky/tests/data-plane/did-cache.test.ts index e5192ed3d97..d90de832cd6 100644 --- a/packages/bsky/tests/data-plane/did-cache.test.ts +++ b/packages/bsky/tests/data-plane/did-cache.test.ts @@ -81,7 +81,7 @@ describe('did cache', () => { }) it('accurately reports expired dids & refreshes the cache', async () => { - const didCache = new DidSqlCache(network.bsky.db.getPrimary(), 1, 60000) + const didCache = new DidSqlCache(network.bsky.db, 1, 60000) const shortCacheResolver = new IdResolver({ plcUrl: network.bsky.ctx.cfg.didPlcUrl, didCache, @@ -110,7 +110,7 @@ describe('did cache', () => { }) it('does not return expired dids & refreshes the cache', async () => { - const didCache = new DidSqlCache(network.bsky.db.getPrimary(), 0, 1) + const 
didCache = new DidSqlCache(network.bsky.db, 0, 1) const shortExpireResolver = new IdResolver({ plcUrl: network.bsky.ctx.cfg.didPlcUrl, didCache, diff --git a/packages/bsky/tests/data-plane/duplicate-records.test.ts b/packages/bsky/tests/data-plane/duplicate-records.test.ts index d9570d03b37..da7287893ba 100644 --- a/packages/bsky/tests/data-plane/duplicate-records.test.ts +++ b/packages/bsky/tests/data-plane/duplicate-records.test.ts @@ -3,18 +3,18 @@ import { cidForCbor, TID } from '@atproto/common' import { WriteOpAction } from '@atproto/repo' import { TestNetwork } from '@atproto/dev-env' import * as lex from '../../src/lexicon/lexicons' -import { Database, PrimaryDatabase } from '../../src' +import { Database } from '../../src' describe('duplicate record', () => { let network: TestNetwork let did: string - let db: PrimaryDatabase + let db: Database beforeAll(async () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_duplicates', }) - db = network.bsky.db.getPrimary() + db = network.bsky.db did = 'did:example:alice' }) diff --git a/packages/bsky/tests/data-plane/handle-invalidation.test.ts b/packages/bsky/tests/data-plane/handle-invalidation.test.ts index fa20a5f02bd..2f0df04e4ff 100644 --- a/packages/bsky/tests/data-plane/handle-invalidation.test.ts +++ b/packages/bsky/tests/data-plane/handle-invalidation.test.ts @@ -43,9 +43,8 @@ describe('handle invalidation', () => { const backdateIndexedAt = async (did: string) => { const TWO_DAYS_AGO = new Date(Date.now() - 2 * DAY).toISOString() - await network.bsky.db - .getPrimary() - .db.updateTable('actor') + await network.bsky.db.db + .updateTable('actor') .set({ indexedAt: TWO_DAYS_AGO }) .where('did', '=', did) .execute() diff --git a/packages/bsky/tests/data-plane/indexing.test.ts b/packages/bsky/tests/data-plane/indexing.test.ts index e141491dd34..1a84d7dc320 100644 --- a/packages/bsky/tests/data-plane/indexing.test.ts +++ b/packages/bsky/tests/data-plane/indexing.test.ts @@ -32,7 +32,7 @@ 
describe('indexing', () => { agent = network.bsky.getClient() pdsAgent = network.pds.getClient() sc = network.getSeedClient() - db = network.bsky.db.getPrimary() + db = network.bsky.db await usersSeed(sc) // Data in tests is not processed from subscription await network.processAll() diff --git a/packages/bsky/tests/data-plane/subscription/repo.test.ts b/packages/bsky/tests/data-plane/subscription/repo.test.ts index a80ab92aa33..42ee6da7e3d 100644 --- a/packages/bsky/tests/data-plane/subscription/repo.test.ts +++ b/packages/bsky/tests/data-plane/subscription/repo.test.ts @@ -29,7 +29,7 @@ describe('sync', () => { }) it('indexes permit history being replayed.', async () => { - const db = network.bsky.db.getPrimary() + const { db } = network.bsky // Generate some modifications and dupes const { alice, bob, carol, dan } = sc.dids @@ -97,9 +97,7 @@ describe('sync', () => { }) await network.processAll() // confirm jack was indexed as an actor despite the bad event - const actors = await dumpTable(network.bsky.db.getPrimary(), 'actor', [ - 'did', - ]) + const actors = await dumpTable(network.bsky.db, 'actor', ['did']) expect(actors.map((a) => a.handle)).toContain('jack.test') network.pds.ctx.sequencer.sequenceCommit = sequenceCommitOrig }) diff --git a/packages/bsky/tests/feed-generation.test.ts b/packages/bsky/tests/feed-generation.test.ts index 500b6e68379..23b0f2bc170 100644 --- a/packages/bsky/tests/feed-generation.test.ts +++ b/packages/bsky/tests/feed-generation.test.ts @@ -74,9 +74,8 @@ describe('feed generation', () => { { uri: feedUriBadPagination.toString(), order: 3 }, { uri: primeUri.toString(), order: 4 }, ] - await network.bsky.db - .getPrimary() - .db.insertInto('suggested_feed') + await network.bsky.db.db + .insertInto('suggested_feed') .values(feedSuggestions) .execute() }) diff --git a/packages/bsky/tests/seeds/basic.ts b/packages/bsky/tests/seeds/basic.ts index 87311b78ccc..e674d2243dc 100644 --- a/packages/bsky/tests/seeds/basic.ts +++ 
b/packages/bsky/tests/seeds/basic.ts @@ -1,7 +1,8 @@ import { SeedClient, TestNetwork, TestNetworkNoAppView } from '@atproto/dev-env' import { ids } from '../../src/lexicon/lexicons' import usersSeed from './users' -import { PrimaryDatabase } from '../../src' +import { Database } from '../../src' +import DatabaseSchema from '@atproto/pds/dist/db/database-schema' export default async ( sc: SeedClient, @@ -135,7 +136,7 @@ export default async ( await sc.repost(dan, alicesReplyToBob.ref) if (sc.network instanceof TestNetwork) { - const db = sc.network.bsky.db.getPrimary() + const { db } = sc.network.bsky await createLabel(db, { val: 'test-label', uri: sc.posts[alice][2].ref.uriStr, @@ -170,7 +171,7 @@ export const replies = { } const createLabel = async ( - db: PrimaryDatabase, + db: Database, opts: { uri: string; cid: string; val: string }, ) => { await db.db diff --git a/packages/bsky/tests/server.test.ts b/packages/bsky/tests/server.test.ts index 3084d8f2727..a6f65513c40 100644 --- a/packages/bsky/tests/server.test.ts +++ b/packages/bsky/tests/server.test.ts @@ -3,13 +3,11 @@ import express from 'express' import axios, { AxiosError } from 'axios' import { TestNetwork } from '@atproto/dev-env' import { handler as errorHandler } from '../src/error' -import { Database } from '../src' import basicSeed from './seeds/basic' import { once } from 'events' describe('server', () => { let network: TestNetwork - let db: Database let alice: string beforeAll(async () => { @@ -20,7 +18,6 @@ describe('server', () => { await basicSeed(sc) await network.processAll() alice = sc.dids.alice - db = network.bsky.db.getPrimary() }) afterAll(async () => { diff --git a/packages/bsky/tests/views/actor-search.test.ts b/packages/bsky/tests/views/actor-search.test.ts index 0a51556c591..d1dea5ab285 100644 --- a/packages/bsky/tests/views/actor-search.test.ts +++ b/packages/bsky/tests/views/actor-search.test.ts @@ -24,7 +24,7 @@ describe.skip('pds actor search views', () => { await 
usersBulkSeed(sc) // Skip did/handle resolution for expediency - const db = network.bsky.db.getPrimary() + const { db } = network.bsky const now = new Date().toISOString() await db.db .insertInto('actor') diff --git a/packages/bsky/tests/views/suggested-follows.test.ts b/packages/bsky/tests/views/suggested-follows.test.ts index 2be077e7dc6..5f2df54b4c5 100644 --- a/packages/bsky/tests/views/suggested-follows.test.ts +++ b/packages/bsky/tests/views/suggested-follows.test.ts @@ -26,9 +26,8 @@ describe('suggested follows', () => { { did: sc.dids.fred, order: 5 }, { did: sc.dids.gina, order: 6 }, ] - await network.bsky.db - .getPrimary() - .db.insertInto('suggested_follow') + await network.bsky.db.db + .insertInto('suggested_follow') .values(suggestions) .execute() }) diff --git a/packages/bsky/tests/views/suggestions.test.ts b/packages/bsky/tests/views/suggestions.test.ts index d524a420295..401bf79030c 100644 --- a/packages/bsky/tests/views/suggestions.test.ts +++ b/packages/bsky/tests/views/suggestions.test.ts @@ -24,9 +24,8 @@ describe('pds user search views', () => { { did: sc.dids.dan, order: 4 }, ] - await network.bsky.db - .getPrimary() - .db.insertInto('suggested_follow') + await network.bsky.db.db + .insertInto('suggested_follow') .values(suggestions) .execute() }) diff --git a/packages/bsky/tests/views/timeline.test.ts b/packages/bsky/tests/views/timeline.test.ts index 70a3cfad21d..25b80d65a63 100644 --- a/packages/bsky/tests/views/timeline.test.ts +++ b/packages/bsky/tests/views/timeline.test.ts @@ -4,7 +4,7 @@ import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, getOriginator, paginateAll } from '../_util' import basicSeed from '../seeds/basic' import { FeedViewPost } from '../../src/lexicon/types/app/bsky/feed/defs' -import { PrimaryDatabase } from '../../src' +import { Database } from '../../src' const REVERSE_CHRON = 'reverse-chronological' @@ -32,7 +32,7 @@ describe('timeline views', () => { carol = sc.dids.carol dan = 
sc.dids.dan // covers label hydration on embeds - const db = network.bsky.db.getPrimary() + const { db } = network.bsky await createLabel(db, { val: 'test-label-3', uri: sc.posts[bob][0].ref.uriStr, @@ -234,7 +234,7 @@ describe('timeline views', () => { }) const createLabel = async ( - db: PrimaryDatabase, + db: Database, opts: { uri: string; cid: string; val: string }, ) => { await db.db diff --git a/packages/dev-env/src/bsky.ts b/packages/dev-env/src/bsky.ts index 3b726f2a096..e9f95158235 100644 --- a/packages/dev-env/src/bsky.ts +++ b/packages/dev-env/src/bsky.ts @@ -13,7 +13,7 @@ export class TestBsky { constructor( public url: string, public port: number, - public db: bsky.DatabaseCoordinator, + public db: bsky.Database, public server: bsky.BskyAppView, public dataplane: bsky.DataPlaneServer, public sub: bsky.RepoSubscription, @@ -34,20 +34,14 @@ export class TestBsky { }) // shared across server, ingester, and indexer in order to share pool, avoid too many pg connections. - const db = new bsky.DatabaseCoordinator({ + const db = new bsky.Database({ + url: cfg.dbPostgresUrl, schema: cfg.dbPostgresSchema, - primary: { - url: cfg.dbPostgresUrl, - poolSize: 10, - }, - replicas: [], + poolSize: 10, }) const dataplanePort = await getPort() - const dataplane = await bsky.DataPlaneServer.create( - db.getPrimary(), - dataplanePort, - ) + const dataplane = await bsky.DataPlaneServer.create(db, dataplanePort) const config = new bsky.ServerConfig({ version: 'unknown', @@ -64,7 +58,7 @@ export class TestBsky { }) // Separate migration db in case migration changes some connection state that we need in the tests, e.g. "alter database ... set ..." 
- const migrationDb = new bsky.PrimaryDatabase({ + const migrationDb = new bsky.Database({ url: cfg.dbPostgresUrl, schema: cfg.dbPostgresSchema, }) @@ -75,7 +69,7 @@ export class TestBsky { } await migrationDb.close() - const didCache = new bsky.DidSqlCache(db.getPrimary(), HOUR, DAY) + const didCache = new bsky.DidSqlCache(db, HOUR, DAY) // api server const server = bsky.BskyAppView.create({ @@ -87,9 +81,9 @@ export class TestBsky { const sub = new bsky.RepoSubscription({ service: cfg.repoProvider, - db: db.getPrimary(), + db, idResolver: server.ctx.idResolver, - background: new BackgroundQueue(db.getPrimary()), + background: new BackgroundQueue(db), }) await server.start() diff --git a/packages/dev-env/src/mock/index.ts b/packages/dev-env/src/mock/index.ts index eec0bb3b38e..f115a1112ef 100644 --- a/packages/dev-env/src/mock/index.ts +++ b/packages/dev-env/src/mock/index.ts @@ -1,6 +1,6 @@ import { AtUri } from '@atproto/syntax' import AtpAgent from '@atproto/api' -import { PrimaryDatabase } from '@atproto/bsky' +import { Database } from '@atproto/bsky' import { REASONSPAM, REASONOTHER, @@ -187,12 +187,12 @@ export async function generateMockSetup(env: TestNetwork) { }, ) - await createLabel(env.bsky.db.getPrimary(), { + await createLabel(env.bsky.db, { uri: labeledPost.uri, cid: labeledPost.cid, val: 'nudity', }) - await createLabel(env.bsky.db.getPrimary(), { + await createLabel(env.bsky.db, { uri: filteredPost.uri, cid: filteredPost.cid, val: 'dmca-violation', @@ -332,7 +332,7 @@ function ucfirst(str: string): string { } const createLabel = async ( - db: PrimaryDatabase, + db: Database, opts: { uri: string; cid: string; val: string }, ) => { await db.db diff --git a/packages/pds/tests/proxied/views.test.ts b/packages/pds/tests/proxied/views.test.ts index b0fd3e6e17a..39525d33c52 100644 --- a/packages/pds/tests/proxied/views.test.ts +++ b/packages/pds/tests/proxied/views.test.ts @@ -79,9 +79,8 @@ describe('proxies view requests', () => { { did: sc.dids.carol, 
order: 2 }, { did: sc.dids.dan, order: 3 }, ] - await network.bsky.db - .getPrimary() - .db.insertInto('suggested_follow') + await network.bsky.db.db + .insertInto('suggested_follow') .values(suggestions) .execute() diff --git a/packages/pds/tests/seeds/basic.ts b/packages/pds/tests/seeds/basic.ts index 31195c5b9ba..89a5b210c2f 100644 --- a/packages/pds/tests/seeds/basic.ts +++ b/packages/pds/tests/seeds/basic.ts @@ -157,9 +157,8 @@ const createLabel = async ( bsky: TestBsky, opts: { did: string; val: string }, ) => { - await bsky.db - .getPrimary() - .db.insertInto('label') + await bsky.db.db + .insertInto('label') .values({ uri: opts.did, cid: '',