diff --git a/Makefile b/Makefile index f9e90a86a44..3bc977c15a3 100644 --- a/Makefile +++ b/Makefile @@ -22,15 +22,9 @@ test: ## Run all tests run-dev-env: ## Run a "development environment" shell cd packages/dev-env; pnpm run start -.PHONY: run-dev-pds -run-dev-pds: ## Run PDS locally - if [ ! -f "packages/pds/.dev.env" ]; then cp packages/pds/example.dev.env packages/pds/.dev.env; fi - cd packages/pds; ENV=dev pnpm run start | pnpm exec pino-pretty - -.PHONY: run-dev-bsky -run-dev-bsky: ## Run appview ('bsky') locally - if [ ! -f "packages/bsky/.dev.env" ]; then cp packages/bsky/example.dev.env packages/bsky/.dev.env; fi - cd packages/bsky; ENV=dev pnpm run start | pnpm exec pino-pretty +.PHONY: run-dev-env-logged +run-dev-env-logged: ## Run a "development environment" shell (with logging) + cd packages/dev-env; LOG_ENABLED=true pnpm run start | pnpm exec pino-pretty .PHONY: codegen codegen: ## Re-generate packages from lexicon/ files diff --git a/packages/api/package.json b/packages/api/package.json index 20780bfb888..a8af7b6b3d7 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -40,7 +40,7 @@ }, "devDependencies": { "@atproto/lex-cli": "workspace:^", - "@atproto/pds": "workspace:^", + "@atproto/dev-env": "workspace:^", "common-tags": "^1.8.2" } } diff --git a/packages/api/tests/agent.test.ts b/packages/api/tests/agent.test.ts index 82d590ec85b..933326c43f2 100644 --- a/packages/api/tests/agent.test.ts +++ b/packages/api/tests/agent.test.ts @@ -1,29 +1,23 @@ import { defaultFetchHandler } from '@atproto/xrpc' -import { - CloseFn, - runTestServer, - TestServerInfo, -} from '@atproto/pds/tests/_util' import { AtpAgent, AtpAgentFetchHandlerResponse, AtpSessionEvent, AtpSessionData, } from '..' 
+import { TestNetworkNoAppView } from '@atproto/dev-env' describe('agent', () => { - let server: TestServerInfo - let close: CloseFn + let network: TestNetworkNoAppView beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'api_agent', }) - close = server.close }) afterAll(async () => { - await close() + await network.close() }) it('creates a new session on account creation.', async () => { @@ -34,7 +28,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) const res = await agent.createAccount({ handle: 'user1.test', @@ -74,7 +68,7 @@ describe('agent', () => { sessions.push(sess) } - const agent1 = new AtpAgent({ service: server.url, persistSession }) + const agent1 = new AtpAgent({ service: network.pds.url, persistSession }) const email = 'user2@test.com' await agent1.createAccount({ @@ -83,7 +77,7 @@ describe('agent', () => { password: 'password', }) - const agent2 = new AtpAgent({ service: server.url, persistSession }) + const agent2 = new AtpAgent({ service: network.pds.url, persistSession }) const res1 = await agent2.login({ identifier: 'user2.test', password: 'password', @@ -122,7 +116,7 @@ describe('agent', () => { sessions.push(sess) } - const agent1 = new AtpAgent({ service: server.url, persistSession }) + const agent1 = new AtpAgent({ service: network.pds.url, persistSession }) await agent1.createAccount({ handle: 'user3.test', @@ -133,7 +127,7 @@ describe('agent', () => { throw new Error('No session created') } - const agent2 = new AtpAgent({ service: server.url, persistSession }) + const agent2 = new AtpAgent({ service: network.pds.url, persistSession }) const res1 = await agent2.resumeSession(agent1.session) expect(agent2.hasSession).toEqual(true) @@ -165,7 +159,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new 
AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) // create an account and a session with it await agent.createAccount({ @@ -230,7 +224,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) // create an account and a session with it await agent.createAccount({ @@ -309,7 +303,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) try { await agent.login({ @@ -349,7 +343,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) // create an account and a session with it await agent.createAccount({ @@ -420,7 +414,7 @@ describe('agent', () => { newHandlerCallCount++ } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) await agent.createAccount({ handle: 'user7.test', @@ -452,7 +446,7 @@ describe('agent', () => { sessions.push(sess) } - const agent = new AtpAgent({ service: server.url, persistSession }) + const agent = new AtpAgent({ service: network.pds.url, persistSession }) await expect( agent.createAccount({ diff --git a/packages/api/tests/bsky-agent.test.ts b/packages/api/tests/bsky-agent.test.ts index 8066bd61f3a..5582f7ac021 100644 --- a/packages/api/tests/bsky-agent.test.ts +++ b/packages/api/tests/bsky-agent.test.ts @@ -1,23 +1,17 @@ -import { - CloseFn, - runTestServer, - TestServerInfo, -} from '@atproto/pds/tests/_util' +import { TestNetworkNoAppView } from '@atproto/dev-env' import { BskyAgent, ComAtprotoRepoPutRecord, AppBskyActorProfile } from '..' 
describe('agent', () => { - let server: TestServerInfo - let close: CloseFn + let network: TestNetworkNoAppView beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'bsky_agent', }) - close = server.close }) afterAll(async () => { - await close() + await network.close() }) const getProfileDisplayName = async ( @@ -35,7 +29,7 @@ describe('agent', () => { } it('upsertProfile correctly creates and updates profiles.', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user1.test', @@ -67,7 +61,7 @@ describe('agent', () => { }) it('upsertProfile correctly handles CAS failures.', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user2.test', @@ -106,7 +100,7 @@ describe('agent', () => { }) it('upsertProfile wont endlessly retry CAS failures.', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user3.test', @@ -135,7 +129,7 @@ describe('agent', () => { }) it('upsertProfile validates the record.', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user4.test', @@ -153,70 +147,70 @@ describe('agent', () => { describe('app', () => { it('should retrieve the api app', () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) expect(agent.app).toBe(agent.api.app) }) }) describe('post', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.post({ text: 'foo' 
})).rejects.toThrow('Not logged in') }) }) describe('deletePost', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.deletePost('foo')).rejects.toThrow('Not logged in') }) }) describe('like', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.like('foo', 'bar')).rejects.toThrow('Not logged in') }) }) describe('deleteLike', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.deleteLike('foo')).rejects.toThrow('Not logged in') }) }) describe('repost', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.repost('foo', 'bar')).rejects.toThrow('Not logged in') }) }) describe('deleteRepost', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.deleteRepost('foo')).rejects.toThrow('Not logged in') }) }) describe('follow', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.follow('foo')).rejects.toThrow('Not logged in') }) }) describe('deleteFollow', () => { it('should throw if no session', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await expect(agent.deleteFollow('foo')).rejects.toThrow('Not logged in') }) }) describe('preferences methods', () => { it('gets and sets preferences 
correctly', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user5.test', @@ -714,7 +708,7 @@ describe('agent', () => { }) it('resolves duplicates correctly', async () => { - const agent = new BskyAgent({ service: server.url }) + const agent = new BskyAgent({ service: network.pds.url }) await agent.createAccount({ handle: 'user6.test', diff --git a/packages/api/tests/errors.test.ts b/packages/api/tests/errors.test.ts index 76e1424badc..2d903f2f17e 100644 --- a/packages/api/tests/errors.test.ts +++ b/packages/api/tests/errors.test.ts @@ -1,25 +1,19 @@ -import { - CloseFn, - runTestServer, - TestServerInfo, -} from '@atproto/pds/tests/_util' import { AtpAgent, ComAtprotoServerCreateAccount } from '..' +import { TestNetworkNoAppView } from '@atproto/dev-env' describe('errors', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let client: AtpAgent - let close: CloseFn beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'known_errors', }) - client = new AtpAgent({ service: server.url }) - close = server.close + client = network.pds.getClient() }) afterAll(async () => { - await close() + await network.close() }) it('constructs the correct error instance', async () => { diff --git a/packages/bsky/example.dev.env b/packages/bsky/example.dev.env deleted file mode 100644 index 021790c9731..00000000000 --- a/packages/bsky/example.dev.env +++ /dev/null @@ -1,5 +0,0 @@ -DB_POSTGRES_URL="postgres://bsky:yksb@localhost/bsky_dev" -DEBUG_MODE=1 -LOG_ENABLED="true" -LOG_LEVEL=debug -LOG_DESTINATION=1 diff --git a/packages/bsky/package.json b/packages/bsky/package.json index 03586043bcd..77fe28c7bf6 100644 --- a/packages/bsky/package.json +++ b/packages/bsky/package.json @@ -43,7 +43,6 @@ "@isaacs/ttlcache": "^1.4.1", "compression": "^1.7.4", "cors": "^2.8.5", - "dotenv": 
"^16.0.0", "express": "^4.17.2", "express-async-errors": "^3.1.1", "form-data": "^4.0.0", diff --git a/packages/bsky/src/api/app/bsky/feed/getPostThread.ts b/packages/bsky/src/api/app/bsky/feed/getPostThread.ts index 0c26285b384..1480e54ac1a 100644 --- a/packages/bsky/src/api/app/bsky/feed/getPostThread.ts +++ b/packages/bsky/src/api/app/bsky/feed/getPostThread.ts @@ -37,9 +37,9 @@ export default function (server: Server, ctx: AppContext) { presentation, ) server.app.bsky.feed.getPostThread({ - auth: ctx.authOptionalVerifier, + auth: ctx.authOptionalAccessOrRoleVerifier, handler: async ({ params, auth, res }) => { - const viewer = auth.credentials.did + const viewer = 'did' in auth.credentials ? auth.credentials.did : null const db = ctx.db.getReplica('thread') const feedService = ctx.services.feed(db) const actorService = ctx.services.actor(db) diff --git a/packages/bsky/src/db/migrations/20230929T192920807Z-record-cursor-indexes.ts b/packages/bsky/src/db/migrations/20230929T192920807Z-record-cursor-indexes.ts new file mode 100644 index 00000000000..0b587fe0eaf --- /dev/null +++ b/packages/bsky/src/db/migrations/20230929T192920807Z-record-cursor-indexes.ts @@ -0,0 +1,40 @@ +import { Kysely } from 'kysely' + +export async function up(db: Kysely): Promise { + await db.schema + .createIndex('like_creator_cursor_idx') + .on('like') + .columns(['creator', 'sortAt', 'cid']) + .execute() + await db.schema + .createIndex('follow_creator_cursor_idx') + .on('follow') + .columns(['creator', 'sortAt', 'cid']) + .execute() + await db.schema + .createIndex('follow_subject_cursor_idx') + .on('follow') + .columns(['subjectDid', 'sortAt', 'cid']) + .execute() + + // drop old indices that are superceded by these + await db.schema.dropIndex('like_creator_idx').execute() + await db.schema.dropIndex('follow_subjectdid_idx').execute() +} + +export async function down(db: Kysely): Promise { + await db.schema + .createIndex('like_creator_idx') + .on('like') + .column('creator') + 
.execute() + await db.schema + .createIndex('follow_subjectdid_idx') + .on('follow') + .column('subjectDid') + .execute() + + await db.schema.dropIndex('like_creator_cursor_idx').execute() + await db.schema.dropIndex('follow_creator_cursor_idx').execute() + await db.schema.dropIndex('follow_subject_cursor_idx').execute() +} diff --git a/packages/bsky/src/db/migrations/index.ts b/packages/bsky/src/db/migrations/index.ts index 0835376656c..3cbef174e34 100644 --- a/packages/bsky/src/db/migrations/index.ts +++ b/packages/bsky/src/db/migrations/index.ts @@ -29,4 +29,6 @@ export * as _20230830T205507322Z from './20230830T205507322Z-suggested-feeds' export * as _20230904T211011773Z from './20230904T211011773Z-block-lists' export * as _20230906T222220386Z from './20230906T222220386Z-thread-gating' export * as _20230920T213858047Z from './20230920T213858047Z-add-tags-to-post' +export * as _20230927T202833377Z from './20230927T202833377Z-create-moderation-subject-status' +export * as _20230929T192920807Z from './20230929T192920807Z-record-cursor-indexes' diff --git a/packages/bsky/src/feed-gen/with-friends.ts b/packages/bsky/src/feed-gen/with-friends.ts index 98f784102a5..0fd8f31c48e 100644 --- a/packages/bsky/src/feed-gen/with-friends.ts +++ b/packages/bsky/src/feed-gen/with-friends.ts @@ -15,7 +15,7 @@ const handler: AlgoHandler = async ( const { ref } = db.db.dynamic - const keyset = new FeedKeyset(ref('post.indexedAt'), ref('post.cid')) + const keyset = new FeedKeyset(ref('post.sortAt'), ref('post.cid')) const sortFrom = keyset.unpack(cursor)?.primary let postsQb = feedService @@ -24,7 +24,7 @@ .innerJoin('post_agg', 'post_agg.uri', 'post.uri') .where('post_agg.likeCount', '>=', 5) .where('follow.creator', '=', requester) - .where('post.indexedAt', '>', getFeedDateThreshold(sortFrom)) + .where('post.sortAt', '>', getFeedDateThreshold(sortFrom)) postsQb = 
paginate(postsQb, { limit, cursor, keyset, tryIndex: true }) diff --git a/packages/bsky/tests/__snapshots__/feed-generation.test.ts.snap b/packages/bsky/tests/__snapshots__/feed-generation.test.ts.snap index 3ddf6266fbd..1a5f8fc9281 100644 --- a/packages/bsky/tests/__snapshots__/feed-generation.test.ts.snap +++ b/packages/bsky/tests/__snapshots__/feed-generation.test.ts.snap @@ -419,12 +419,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg", }, @@ -475,7 +475,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -486,7 +486,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -679,12 +679,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": 
"https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg", }, @@ -735,7 +735,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -746,7 +746,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -906,12 +906,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(4)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(4)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(5)@jpeg", }, @@ -962,7 +962,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -973,7 +973,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/__snapshots__/indexing.test.ts.snap b/packages/bsky/tests/__snapshots__/indexing.test.ts.snap index f7ccc4e688a..0ed4aeb4d02 100644 --- a/packages/bsky/tests/__snapshots__/indexing.test.ts.snap +++ b/packages/bsky/tests/__snapshots__/indexing.test.ts.snap @@ -101,7 
+101,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(2)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(2)/cids(5)@jpeg", }, @@ -134,7 +134,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -245,7 +245,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -256,7 +256,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/algos/hot-classic.test.ts b/packages/bsky/tests/algos/hot-classic.test.ts index d7f3c221e1b..aa96967c2c5 100644 --- a/packages/bsky/tests/algos/hot-classic.test.ts +++ b/packages/bsky/tests/algos/hot-classic.test.ts @@ -1,8 +1,7 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { makeAlgos } from '../../src' -import { TestNetwork } from '@atproto/dev-env' describe('algo hot-classic', () => { let network: TestNetwork @@ -26,8 +25,7 @@ describe('algo hot-classic', () => { bsky: { algos: makeAlgos(feedPublisherDid) }, }) agent = new AtpAgent({ service: network.bsky.url }) - const pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = 
sc.dids.alice @@ -43,7 +41,7 @@ describe('algo hot-classic', () => { it('returns well liked posts', async () => { const img = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const one = await sc.post(alice, 'first post', undefined, [img]) diff --git a/packages/bsky/tests/algos/whats-hot.test.ts b/packages/bsky/tests/algos/whats-hot.test.ts index 23cac215dda..9fb93a8ce50 100644 --- a/packages/bsky/tests/algos/whats-hot.test.ts +++ b/packages/bsky/tests/algos/whats-hot.test.ts @@ -1,9 +1,8 @@ import { HOUR } from '@atproto/common' import AtpAgent, { AtUri } from '@atproto/api' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { makeAlgos } from '../../src' -import { TestNetwork } from '@atproto/dev-env' describe.skip('algo whats-hot', () => { let network: TestNetwork @@ -28,8 +27,7 @@ describe.skip('algo whats-hot', () => { bsky: { algos: makeAlgos(feedPublisherDid) }, }) agent = new AtpAgent({ service: network.bsky.url }) - const pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice @@ -46,7 +44,7 @@ describe.skip('algo whats-hot', () => { it('returns well liked posts', async () => { const img = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const one = await sc.post(carol, 'carol is in the chat') diff --git a/packages/bsky/tests/algos/with-friends.test.ts b/packages/bsky/tests/algos/with-friends.test.ts index 12f35083ae4..2c5339849c8 100644 --- a/packages/bsky/tests/algos/with-friends.test.ts +++ b/packages/bsky/tests/algos/with-friends.test.ts @@ -1,8 +1,7 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { RecordRef, SeedClient } from '../seeds/client' import userSeed 
from '../seeds/users' import { makeAlgos } from '../../src' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' describe.skip('algo with friends', () => { let network: TestNetwork @@ -28,8 +27,7 @@ describe.skip('algo with friends', () => { bsky: { algos: makeAlgos(feedPublisherDid) }, }) agent = new AtpAgent({ service: network.bsky.url }) - const pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await userSeed(sc) alice = sc.dids.alice diff --git a/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts b/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts index f9fc320dcb9..09422cd8d6e 100644 --- a/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts +++ b/packages/bsky/tests/auto-moderator/fuzzy-matcher.test.ts @@ -1,6 +1,5 @@ +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { FuzzyMatcher, encode } from '../../src/auto-moderator/fuzzy-matcher' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { AtpAgent } from '@atproto/api' import { ImageInvalidator } from '../../src/image/invalidator' @@ -25,7 +24,7 @@ describe('fuzzy matcher', () => { }) fuzzyMatcher = new FuzzyMatcher(['evil', 'mean', 'bad'], ['baddie']) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/auto-moderator/labeler.test.ts b/packages/bsky/tests/auto-moderator/labeler.test.ts index 7227e769549..dbd486c6061 100644 --- a/packages/bsky/tests/auto-moderator/labeler.test.ts +++ b/packages/bsky/tests/auto-moderator/labeler.test.ts @@ -1,12 +1,11 @@ -import { AtUri, AtpAgent, BlobRef } from '@atproto/api' +import { TestNetwork } from '@atproto/dev-env' +import { AtUri, BlobRef } from '@atproto/api' import { Readable 
} from 'stream' import { AutoModerator } from '../../src/auto-moderator' import IndexerContext from '../../src/indexer/context' import { cidForRecord } from '@atproto/repo' -import { cidForCbor, TID } from '@atproto/common' +import { TID } from '@atproto/common' import { LabelService } from '../../src/services/label' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' import usersSeed from '../seeds/users' import { CID } from 'multiformats/cid' import { ImgLabeler } from '../../src/auto-moderator/hive' @@ -37,8 +36,7 @@ describe('labeler', () => { autoMod = ctx.autoMod autoMod.imgLabeler = new TestImgLabeler() labelSrvc = ctx.services.label(ctx.db) - const pdsAgent = new AtpAgent({ service: network.pds.url }) - const sc = new SeedClient(pdsAgent) + const sc = network.getSeedClient() await usersSeed(sc) await network.processAll() alice = sc.dids.alice @@ -58,7 +56,7 @@ describe('labeler', () => { await repoSvc.blobs.associateBlob( preparedBlobRef, postUri(), - await cidForCbor(1), + TID.nextStr(), alice, ) return blobRef diff --git a/packages/bsky/tests/auto-moderator/takedowns.test.ts b/packages/bsky/tests/auto-moderator/takedowns.test.ts index 32c5c941642..6c7b0669b77 100644 --- a/packages/bsky/tests/auto-moderator/takedowns.test.ts +++ b/packages/bsky/tests/auto-moderator/takedowns.test.ts @@ -1,10 +1,9 @@ import fs from 'fs/promises' +import { TestNetwork, SeedClient, ImageRef } from '@atproto/dev-env' import { AtpAgent } from '@atproto/api' import { AutoModerator } from '../../src/auto-moderator' import IndexerContext from '../../src/indexer/context' import { sha256RawToCid } from '@atproto/common' -import { TestNetwork } from '@atproto/dev-env' -import { ImageRef, SeedClient } from '../seeds/client' import usersSeed from '../seeds/users' import { CID } from 'multiformats/cid' import { AtUri } from '@atproto/syntax' @@ -41,31 +40,31 @@ describe('takedowner', () => { autoMod = ctx.autoMod autoMod.imageFlagger = new 
TestFlagger() pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await usersSeed(sc) await network.processAll() alice = sc.dids.alice const fileBytes1 = await fs.readFile( - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', ) const fileBytes2 = await fs.readFile( - 'tests/image/fixtures/key-portrait-large.jpg', + 'tests/sample-img/key-portrait-large.jpg', ) badCid1 = sha256RawToCid(await sha256(fileBytes1)) badCid2 = sha256RawToCid(await sha256(fileBytes2)) goodBlob = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) badBlob1 = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) badBlob2 = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-large.jpg', + 'tests/sample-img/key-portrait-large.jpg', 'image/jpeg', ) }) diff --git a/packages/bsky/tests/blob-resolver.test.ts b/packages/bsky/tests/blob-resolver.test.ts index fcb2b657ee5..9a4d7f55c72 100644 --- a/packages/bsky/tests/blob-resolver.test.ts +++ b/packages/bsky/tests/blob-resolver.test.ts @@ -2,7 +2,6 @@ import axios, { AxiosInstance } from 'axios' import { CID } from 'multiformats/cid' import { verifyCidForBytes } from '@atproto/common' import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from './seeds/client' import basicSeed from './seeds/basic' import { randomBytes } from '@atproto/crypto' @@ -16,8 +15,7 @@ describe('blob resolver', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_blob_resolver', }) - const pdsAgent = network.pds.getClient() - const sc = new SeedClient(pdsAgent) + const sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/did-cache.test.ts 
b/packages/bsky/tests/did-cache.test.ts index be7544a4a3b..d0b94147bc6 100644 --- a/packages/bsky/tests/did-cache.test.ts +++ b/packages/bsky/tests/did-cache.test.ts @@ -1,6 +1,4 @@ -import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from './seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import userSeed from './seeds/users' import { IdResolver } from '@atproto/identity' import DidSqlCache from '../src/did-cache' @@ -23,8 +21,7 @@ describe('did cache', () => { }) idResolver = network.bsky.indexer.ctx.idResolver didCache = network.bsky.indexer.ctx.didCache - const pdsAgent = new AtpAgent({ service: network.pds.url }) - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await userSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/feed-generation.test.ts b/packages/bsky/tests/feed-generation.test.ts index 17df03de966..09dfd92acc8 100644 --- a/packages/bsky/tests/feed-generation.test.ts +++ b/packages/bsky/tests/feed-generation.test.ts @@ -1,8 +1,12 @@ import { TID } from '@atproto/common' import { AtUri, AtpAgent } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { TestFeedGen } from '@atproto/dev-env/src/feed-gen' -import { Handler as SkeletonHandler } from '@atproto/bsky/src/lexicon/types/app/bsky/feed/getFeedSkeleton' +import { + TestNetwork, + TestFeedGen, + SeedClient, + RecordRef, +} from '@atproto/dev-env' +import { Handler as SkeletonHandler } from '../src/lexicon/types/app/bsky/feed/getFeedSkeleton' import { GeneratorView } from '@atproto/api/src/client/types/app/bsky/feed/defs' import { UnknownFeedError } from '@atproto/api/src/client/types/app/bsky/feed/getFeed' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' @@ -11,9 +15,7 @@ import { FeedViewPost, SkeletonFeedPost, } from '../src/lexicon/types/app/bsky/feed/defs' -import { SeedClient } from './seeds/client' import 
basicSeed from './seeds/basic' -import { RecordRef } from './seeds/client' import { forSnapshot, paginateAll } from './_util' describe('feed generation', () => { @@ -38,7 +40,7 @@ describe('feed generation', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/handle-invalidation.test.ts b/packages/bsky/tests/handle-invalidation.test.ts index 3b9ae789265..972f1b6cc58 100644 --- a/packages/bsky/tests/handle-invalidation.test.ts +++ b/packages/bsky/tests/handle-invalidation.test.ts @@ -1,7 +1,6 @@ import { DAY } from '@atproto/common' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { AtpAgent } from '@atproto/api' -import { SeedClient } from './seeds/client' import userSeed from './seeds/users' describe('handle invalidation', () => { @@ -20,7 +19,7 @@ describe('handle invalidation', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await userSeed(sc) await network.processAll() diff --git a/packages/bsky/tests/image/server.test.ts b/packages/bsky/tests/image/server.test.ts index 072059b52df..3bce638ce45 100644 --- a/packages/bsky/tests/image/server.test.ts +++ b/packages/bsky/tests/image/server.test.ts @@ -1,10 +1,8 @@ import axios, { AxiosInstance } from 'axios' import { CID } from 'multiformats/cid' -import { AtpAgent } from '@atproto/api' import { cidForCbor } from '@atproto/common' import { TestNetwork } from '@atproto/dev-env' import { getInfo } from '../../src/image/sharp' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { ImageUriBuilder } from '../../src/image/uri' @@ -18,8 +16,7 @@ describe('image processing server', () => { network = await TestNetwork.create({ 
dbPostgresSchema: 'bsky_image_processing_server', }) - const pdsAgent = new AtpAgent({ service: network.pds.url }) - const sc = new SeedClient(pdsAgent) + const sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/image/sharp.test.ts b/packages/bsky/tests/image/sharp.test.ts index d0a46b662b3..17b3b7f3964 100644 --- a/packages/bsky/tests/image/sharp.test.ts +++ b/packages/bsky/tests/image/sharp.test.ts @@ -178,7 +178,7 @@ describe('sharp image processor', () => { }) async function processFixture(fixture: string, options: Options) { - const image = createReadStream(`${__dirname}/fixtures/${fixture}`) + const image = createReadStream(`tests/sample-img/${fixture}`) const resized = await resize(image, options) return await getInfo(resized) } diff --git a/packages/bsky/tests/indexing.test.ts b/packages/bsky/tests/indexing.test.ts index cee3ed5a768..9457544b3e5 100644 --- a/packages/bsky/tests/indexing.test.ts +++ b/packages/bsky/tests/indexing.test.ts @@ -11,9 +11,8 @@ import AtpAgent, { AppBskyFeedRepost, AppBskyGraphFollow, } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot } from './_util' -import { SeedClient } from './seeds/client' import usersSeed from './seeds/users' import basicSeed from './seeds/basic' import { ids } from '../src/lexicon/lexicons' @@ -31,7 +30,7 @@ describe('indexing', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await usersSeed(sc) // Data in tests is not processed from subscription await network.processAll() @@ -648,8 +647,9 @@ describe('indexing', () => { headers: sc.getHeaders(alice), }) const { token } = await network.pds.ctx.db.db - .selectFrom('delete_account_token') + .selectFrom('email_token') .selectAll() + .where('purpose', '=', 'delete_account') 
.where('did', '=', alice) .executeTakeFirstOrThrow() await pdsAgent.api.com.atproto.server.deleteAccount({ diff --git a/packages/bsky/tests/moderation.test.ts b/packages/bsky/tests/moderation.test.ts index 109b576bb6f..e1af045693b 100644 --- a/packages/bsky/tests/moderation.test.ts +++ b/packages/bsky/tests/moderation.test.ts @@ -1,9 +1,8 @@ -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, ImageRef, RecordRef, SeedClient } from '@atproto/dev-env' import { TID, cidForCbor } from '@atproto/common' import AtpAgent, { ComAtprotoAdminTakeModerationAction } from '@atproto/api' import { AtUri } from '@atproto/syntax' import { forSnapshot } from './_util' -import { ImageRef, RecordRef, SeedClient } from './seeds/client' import basicSeed from './seeds/basic' import { ACKNOWLEDGE, @@ -27,8 +26,7 @@ describe('moderation', () => { dbPostgresSchema: 'bsky_moderation', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() }) diff --git a/packages/bsky/tests/notification-server.test.ts b/packages/bsky/tests/notification-server.test.ts index aeb7f8ae97c..6f9c8b00224 100644 --- a/packages/bsky/tests/notification-server.test.ts +++ b/packages/bsky/tests/notification-server.test.ts @@ -1,6 +1,5 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from './seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from './seeds/basic' import { NotificationServer } from '../src/notifications' import { Database } from '../src' @@ -21,7 +20,7 @@ describe('notification server', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git 
a/packages/bsky/tests/pipeline/backpressure.test.ts b/packages/bsky/tests/pipeline/backpressure.test.ts index a265bc948c5..583d749100e 100644 --- a/packages/bsky/tests/pipeline/backpressure.test.ts +++ b/packages/bsky/tests/pipeline/backpressure.test.ts @@ -5,8 +5,8 @@ import { getIndexers, getIngester, processAll, + SeedClient, } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { BskyIngester } from '../../src' @@ -33,8 +33,7 @@ describe('pipeline backpressure', () => { name: TEST_NAME, partitionIdsByIndexer: [[0], [1]], }) - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) }) diff --git a/packages/bsky/tests/pipeline/reingest.test.ts b/packages/bsky/tests/pipeline/reingest.test.ts index ed8afdfe36d..3c860bcf680 100644 --- a/packages/bsky/tests/pipeline/reingest.test.ts +++ b/packages/bsky/tests/pipeline/reingest.test.ts @@ -1,5 +1,9 @@ -import { TestNetworkNoAppView, getIngester, ingestAll } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { + TestNetworkNoAppView, + SeedClient, + getIngester, + ingestAll, +} from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { BskyIngester } from '../../src' @@ -18,8 +22,7 @@ describe('pipeline reingestion', () => { name: TEST_NAME, ingesterPartitionCount: 1, }) - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) }) diff --git a/packages/bsky/tests/pipeline/repartition.test.ts b/packages/bsky/tests/pipeline/repartition.test.ts index 12205e56315..f228b954fb6 100644 --- a/packages/bsky/tests/pipeline/repartition.test.ts +++ b/packages/bsky/tests/pipeline/repartition.test.ts @@ -1,12 +1,12 @@ import { BskyIndexers, TestNetworkNoAppView, + SeedClient, getIndexers, getIngester, ingestAll, processAll, } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' 
import usersSeed from '../seeds/users' import { BskyIngester } from '../../src' import { countAll } from '../../src/db/util' @@ -36,8 +36,7 @@ describe('pipeline indexer repartitioning', () => { name: TEST_NAME, partitionIdsByIndexer: [[0], [1]], // two indexers, each consuming one partition }) - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await usersSeed(sc) }) diff --git a/packages/bsky/tests/reprocessing.test.ts b/packages/bsky/tests/reprocessing.test.ts index dd170c570ab..046bc58076b 100644 --- a/packages/bsky/tests/reprocessing.test.ts +++ b/packages/bsky/tests/reprocessing.test.ts @@ -1,14 +1,11 @@ import axios from 'axios' import { AtUri } from '@atproto/syntax' -import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from './seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from './seeds/basic' import { Database } from '../src/db' describe('reprocessing', () => { let network: TestNetwork - let pdsAgent: AtpAgent let sc: SeedClient let alice: string @@ -16,8 +13,7 @@ describe('reprocessing', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_reprocessing', }) - pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice await network.processAll() diff --git a/packages/bsky/tests/image/fixtures/at.png b/packages/bsky/tests/sample-img/at.png similarity index 100% rename from packages/bsky/tests/image/fixtures/at.png rename to packages/bsky/tests/sample-img/at.png diff --git a/packages/bsky/tests/image/fixtures/hd-key.jpg b/packages/bsky/tests/sample-img/hd-key.jpg similarity index 100% rename from packages/bsky/tests/image/fixtures/hd-key.jpg rename to packages/bsky/tests/sample-img/hd-key.jpg diff --git a/packages/bsky/tests/image/fixtures/key-alt.jpg b/packages/bsky/tests/sample-img/key-alt.jpg similarity index 
100% rename from packages/bsky/tests/image/fixtures/key-alt.jpg rename to packages/bsky/tests/sample-img/key-alt.jpg diff --git a/packages/bsky/tests/image/fixtures/key-landscape-large.jpg b/packages/bsky/tests/sample-img/key-landscape-large.jpg similarity index 100% rename from packages/bsky/tests/image/fixtures/key-landscape-large.jpg rename to packages/bsky/tests/sample-img/key-landscape-large.jpg diff --git a/packages/bsky/tests/image/fixtures/key-landscape-small.jpg b/packages/bsky/tests/sample-img/key-landscape-small.jpg similarity index 100% rename from packages/bsky/tests/image/fixtures/key-landscape-small.jpg rename to packages/bsky/tests/sample-img/key-landscape-small.jpg diff --git a/packages/bsky/tests/image/fixtures/key-portrait-large.jpg b/packages/bsky/tests/sample-img/key-portrait-large.jpg similarity index 100% rename from packages/bsky/tests/image/fixtures/key-portrait-large.jpg rename to packages/bsky/tests/sample-img/key-portrait-large.jpg diff --git a/packages/bsky/tests/image/fixtures/key-portrait-small.jpg b/packages/bsky/tests/sample-img/key-portrait-small.jpg similarity index 100% rename from packages/bsky/tests/image/fixtures/key-portrait-small.jpg rename to packages/bsky/tests/sample-img/key-portrait-small.jpg diff --git a/packages/bsky/tests/seeds/basic.ts b/packages/bsky/tests/seeds/basic.ts index c1bd7e41e09..22c6fba01c5 100644 --- a/packages/bsky/tests/seeds/basic.ts +++ b/packages/bsky/tests/seeds/basic.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import { ids } from '../../src/lexicon/lexicons' -import { SeedClient } from './client' import usersSeed from './users' export default async (sc: SeedClient, users = true) => { @@ -34,12 +34,12 @@ export default async (sc: SeedClient, users = true) => { }) const img1 = await sc.uploadFile( carol, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const img2 = await sc.uploadFile( carol, - 
'tests/image/fixtures/key-alt.jpg', + 'tests/sample-img/key-alt.jpg', 'image/jpeg', ) await sc.post( @@ -100,7 +100,7 @@ export default async (sc: SeedClient, users = true) => { const replyImg = await sc.uploadFile( bob, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) await sc.reply( diff --git a/packages/bsky/tests/seeds/client.ts b/packages/bsky/tests/seeds/client.ts deleted file mode 100644 index ee551214789..00000000000 --- a/packages/bsky/tests/seeds/client.ts +++ /dev/null @@ -1,466 +0,0 @@ -import fs from 'fs/promises' -import { CID } from 'multiformats/cid' -import AtpAgent from '@atproto/api' -import { AtUri } from '@atproto/syntax' -import { BlobRef } from '@atproto/lexicon' -import { Main as Facet } from '@atproto/api/src/client/types/app/bsky/richtext/facet' -import { InputSchema as TakeActionInput } from '@atproto/api/src/client/types/com/atproto/admin/takeModerationAction' -import { InputSchema as CreateReportInput } from '@atproto/api/src/client/types/com/atproto/moderation/createReport' -import { Record as PostRecord } from '@atproto/api/src/client/types/app/bsky/feed/post' -import { Record as LikeRecord } from '@atproto/api/src/client/types/app/bsky/feed/like' -import { Record as FollowRecord } from '@atproto/api/src/client/types/app/bsky/graph/follow' - -// Makes it simple to create data via the XRPC client, -// and keeps track of all created data in memory for convenience. 
- -let AVATAR_IMG: Uint8Array | undefined - -export type ImageRef = { - image: BlobRef - alt: string -} - -export class RecordRef { - uri: AtUri - cid: CID - - constructor(uri: AtUri | string, cid: CID | string) { - this.uri = new AtUri(uri.toString()) - this.cid = CID.parse(cid.toString()) - } - - get raw(): { uri: string; cid: string } { - return { - uri: this.uri.toString(), - cid: this.cid.toString(), - } - } - - get uriStr(): string { - return this.uri.toString() - } - - get cidStr(): string { - return this.cid.toString() - } -} - -export class SeedClient { - accounts: Record< - string, - { - did: string - accessJwt: string - refreshJwt: string - handle: string - email: string - password: string - } - > - profiles: Record< - string, - { - displayName: string - description: string - avatar: { cid: string; mimeType: string } - ref: RecordRef - } - > - follows: Record> - posts: Record< - string, - { text: string; ref: RecordRef; images: ImageRef[]; quote?: RecordRef }[] - > - likes: Record> - replies: Record - reposts: Record - lists: Record< - string, - Record }> - > - dids: Record - - constructor(public agent: AtpAgent, public adminAuth?: string) { - this.accounts = {} - this.profiles = {} - this.follows = {} - this.posts = {} - this.likes = {} - this.replies = {} - this.reposts = {} - this.lists = {} - this.dids = {} - } - - async createAccount( - shortName: string, - params: { - handle: string - email: string - password: string - }, - ) { - const { data: account } = - await this.agent.api.com.atproto.server.createAccount(params) - this.dids[shortName] = account.did - this.accounts[account.did] = { - ...account, - email: params.email, - password: params.password, - } - return this.accounts[account.did] - } - - async updateHandle(by: string, handle: string) { - await this.agent.api.com.atproto.identity.updateHandle( - { handle }, - { encoding: 'application/json', headers: this.getHeaders(by) }, - ) - } - - async createProfile( - by: string, - displayName: 
string, - description: string, - selfLabels?: string[], - ) { - AVATAR_IMG ??= await fs.readFile( - 'tests/image/fixtures/key-portrait-small.jpg', - ) - - let avatarBlob - { - const res = await this.agent.api.com.atproto.repo.uploadBlob(AVATAR_IMG, { - encoding: 'image/jpeg', - headers: this.getHeaders(by), - } as any) - avatarBlob = res.data.blob - } - - { - const res = await this.agent.api.app.bsky.actor.profile.create( - { repo: by }, - { - displayName, - description, - avatar: avatarBlob, - labels: selfLabels - ? { - $type: 'com.atproto.label.defs#selfLabels', - values: selfLabels.map((val) => ({ val })), - } - : undefined, - }, - this.getHeaders(by), - ) - this.profiles[by] = { - displayName, - description, - avatar: avatarBlob, - ref: new RecordRef(res.uri, res.cid), - } - } - return this.profiles[by] - } - - async follow(from: string, to: string, overrides?: Partial) { - const res = await this.agent.api.app.bsky.graph.follow.create( - { repo: from }, - { - subject: to, - createdAt: new Date().toISOString(), - ...overrides, - }, - this.getHeaders(from), - ) - this.follows[from] ??= {} - this.follows[from][to] = new RecordRef(res.uri, res.cid) - return this.follows[from][to] - } - - async unfollow(from: string, to: string) { - const follow = this.follows[from][to] - if (!follow) { - throw new Error('follow does not exist') - } - await this.agent.api.app.bsky.graph.follow.delete( - { repo: from, rkey: follow.uri.rkey }, - this.getHeaders(from), - ) - delete this.follows[from][to] - } - - async post( - by: string, - text: string, - facets?: Facet[], - images?: ImageRef[], - quote?: RecordRef, - overrides?: Partial, - ) { - const imageEmbed = images && { - $type: 'app.bsky.embed.images', - images, - } - const recordEmbed = quote && { - record: { uri: quote.uriStr, cid: quote.cidStr }, - } - const embed = - imageEmbed && recordEmbed - ? { - $type: 'app.bsky.embed.recordWithMedia', - record: recordEmbed, - media: imageEmbed, - } - : recordEmbed - ? 
{ $type: 'app.bsky.embed.record', ...recordEmbed } - : imageEmbed - const res = await this.agent.api.app.bsky.feed.post.create( - { repo: by }, - { - text: text, - facets, - embed, - createdAt: new Date().toISOString(), - ...overrides, - }, - this.getHeaders(by), - ) - this.posts[by] ??= [] - const post = { - text, - ref: new RecordRef(res.uri, res.cid), - images: images ?? [], - quote, - } - this.posts[by].push(post) - return post - } - - async deletePost(by: string, uri: AtUri) { - await this.agent.api.app.bsky.feed.post.delete( - { - repo: by, - rkey: uri.rkey, - }, - this.getHeaders(by), - ) - } - - async uploadFile( - by: string, - filePath: string, - encoding: string, - ): Promise { - const file = await fs.readFile(filePath) - const res = await this.agent.api.com.atproto.repo.uploadBlob(file, { - headers: this.getHeaders(by), - encoding, - } as any) - return { image: res.data.blob, alt: filePath } - } - - async like(by: string, subject: RecordRef, overrides?: Partial) { - const res = await this.agent.api.app.bsky.feed.like.create( - { repo: by }, - { - subject: subject.raw, - createdAt: new Date().toISOString(), - ...overrides, - }, - this.getHeaders(by), - ) - this.likes[by] ??= {} - this.likes[by][subject.uriStr] = new AtUri(res.uri) - return this.likes[by][subject.uriStr] - } - - async reply( - by: string, - root: RecordRef, - parent: RecordRef, - text: string, - facets?: Facet[], - images?: ImageRef[], - ) { - const embed = images - ? 
{ - $type: 'app.bsky.embed.images', - images, - } - : undefined - const res = await this.agent.api.app.bsky.feed.post.create( - { repo: by }, - { - text: text, - reply: { - root: root.raw, - parent: parent.raw, - }, - facets, - embed, - createdAt: new Date().toISOString(), - }, - this.getHeaders(by), - ) - this.replies[by] ??= [] - const reply = { - text, - ref: new RecordRef(res.uri, res.cid), - } - this.replies[by].push(reply) - return reply - } - - async repost(by: string, subject: RecordRef) { - const res = await this.agent.api.app.bsky.feed.repost.create( - { repo: by }, - { subject: subject.raw, createdAt: new Date().toISOString() }, - this.getHeaders(by), - ) - this.reposts[by] ??= [] - const repost = new RecordRef(res.uri, res.cid) - this.reposts[by].push(repost) - return repost - } - - async createList(by: string, name: string, purpose: 'mod' | 'curate') { - const res = await this.agent.api.app.bsky.graph.list.create( - { repo: by }, - { - name, - purpose: - purpose === 'mod' - ? 'app.bsky.graph.defs#modlist' - : 'app.bsky.graph.defs#curatelist', - createdAt: new Date().toISOString(), - }, - this.getHeaders(by), - ) - this.lists[by] ??= {} - const ref = new RecordRef(res.uri, res.cid) - this.lists[by][ref.uriStr] = { - ref: ref, - items: {}, - } - return ref - } - - async addToList(by: string, subject: string, list: RecordRef) { - const res = await this.agent.api.app.bsky.graph.listitem.create( - { repo: by }, - { subject, list: list.uriStr, createdAt: new Date().toISOString() }, - this.getHeaders(by), - ) - const ref = new RecordRef(res.uri, res.cid) - const found = (this.lists[by] ?? {})[list.uriStr] - if (found) { - found.items[subject] = ref - } - return ref - } - - async rmFromList(by: string, subject: string, list: RecordRef) { - const foundList = (this.lists[by] ?? {})[list.uriStr] ?? 
{} - if (!foundList) return - const foundItem = foundList.items[subject] - if (!foundItem) return - await this.agent.api.app.bsky.graph.listitem.delete( - { repo: by, rkey: foundItem.uri.rkey }, - this.getHeaders(by), - ) - delete foundList.items[subject] - } - - async takeModerationAction(opts: { - action: TakeActionInput['action'] - subject: TakeActionInput['subject'] - reason?: string - createdBy?: string - }) { - if (!this.adminAuth) { - throw new Error('No admin auth provided to seed client') - } - const { - action, - subject, - reason = 'X', - createdBy = 'did:example:admin', - } = opts - const result = await this.agent.api.com.atproto.admin.takeModerationAction( - { action, subject, createdBy, reason }, - { - encoding: 'application/json', - headers: { authorization: this.adminAuth }, - }, - ) - return result.data - } - - async reverseModerationAction(opts: { - id: number - reason?: string - createdBy?: string - }) { - if (!this.adminAuth) { - throw new Error('No admin auth provided to seed client') - } - - const { id, reason = 'X', createdBy = 'did:example:admin' } = opts - const result = - await this.agent.api.com.atproto.admin.reverseModerationAction( - { id, reason, createdBy }, - { - encoding: 'application/json', - headers: { authorization: this.adminAuth }, - }, - ) - return result.data - } - - async resolveReports(opts: { - actionId: number - reportIds: number[] - createdBy?: string - }) { - if (!this.adminAuth) { - throw new Error('No admin auth provided to seed client') - } - - const { actionId, reportIds, createdBy = 'did:example:admin' } = opts - const result = - await this.agent.api.com.atproto.admin.resolveModerationReports( - { actionId, createdBy, reportIds }, - { - encoding: 'application/json', - headers: { authorization: this.adminAuth }, - }, - ) - return result.data - } - - async createReport(opts: { - reasonType: CreateReportInput['reasonType'] - subject: CreateReportInput['subject'] - reason?: string - reportedBy: string - }) { - const { 
reasonType, subject, reason, reportedBy } = opts - const result = await this.agent.api.com.atproto.moderation.createReport( - { reasonType, subject, reason }, - { - encoding: 'application/json', - headers: this.getHeaders(reportedBy), - }, - ) - return result.data - } - - getHeaders(did: string) { - return SeedClient.getHeaders(this.accounts[did].accessJwt) - } - - static getHeaders(jwt: string) { - return { authorization: `Bearer ${jwt}` } - } -} diff --git a/packages/bsky/tests/seeds/follows.ts b/packages/bsky/tests/seeds/follows.ts index f15156dbff5..1abe555ff00 100644 --- a/packages/bsky/tests/seeds/follows.ts +++ b/packages/bsky/tests/seeds/follows.ts @@ -1,4 +1,4 @@ -import { SeedClient } from './client' +import { SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient) => { await sc.createAccount('alice', users.alice) diff --git a/packages/bsky/tests/seeds/likes.ts b/packages/bsky/tests/seeds/likes.ts index 1747fb2fa59..9c68375c52f 100644 --- a/packages/bsky/tests/seeds/likes.ts +++ b/packages/bsky/tests/seeds/likes.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import basicSeed from './basic' -import { SeedClient } from './client' export default async (sc: SeedClient) => { await basicSeed(sc) diff --git a/packages/bsky/tests/seeds/reposts.ts b/packages/bsky/tests/seeds/reposts.ts index 8de9b8ec655..9bb444ec8f2 100644 --- a/packages/bsky/tests/seeds/reposts.ts +++ b/packages/bsky/tests/seeds/reposts.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import basicSeed from './basic' -import { SeedClient } from './client' export default async (sc: SeedClient) => { await basicSeed(sc) diff --git a/packages/bsky/tests/seeds/users-bulk.ts b/packages/bsky/tests/seeds/users-bulk.ts index 647279c74db..c20ce85de51 100644 --- a/packages/bsky/tests/seeds/users-bulk.ts +++ b/packages/bsky/tests/seeds/users-bulk.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import { chunkArray } from '@atproto/common' 
-import { SeedClient } from './client' export default async (sc: SeedClient, max = Infinity) => { // @TODO when these are run in parallel, seem to get an intermittent diff --git a/packages/bsky/tests/seeds/users.ts b/packages/bsky/tests/seeds/users.ts index 8c14b894db4..2ed5762065a 100644 --- a/packages/bsky/tests/seeds/users.ts +++ b/packages/bsky/tests/seeds/users.ts @@ -1,4 +1,4 @@ -import { SeedClient } from './client' +import { SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient) => { await sc.createAccount('alice', users.alice) diff --git a/packages/bsky/tests/server.test.ts b/packages/bsky/tests/server.test.ts index be2f1c0213e..3f54b2a37bd 100644 --- a/packages/bsky/tests/server.test.ts +++ b/packages/bsky/tests/server.test.ts @@ -4,7 +4,6 @@ import axios, { AxiosError } from 'axios' import { TestNetwork } from '@atproto/dev-env' import { handler as errorHandler } from '../src/error' import { Database } from '../src' -import { SeedClient } from './seeds/client' import basicSeed from './seeds/basic' describe('server', () => { @@ -16,8 +15,7 @@ describe('server', () => { network = await TestNetwork.create({ dbPostgresSchema: 'bsky_server', }) - const pdsAgent = network.pds.getClient() - const sc = new SeedClient(pdsAgent) + const sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/subscription/repo.test.ts b/packages/bsky/tests/subscription/repo.test.ts index 43c1287ba95..dcdc77cd7a8 100644 --- a/packages/bsky/tests/subscription/repo.test.ts +++ b/packages/bsky/tests/subscription/repo.test.ts @@ -1,6 +1,5 @@ import AtpAgent from '@atproto/api' - -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { CommitData } from '@atproto/repo' import { RepoService } from '@atproto/pds/src/services/repo' import { PreparedWrite } from '@atproto/pds/src/repo' @@ -11,7 +10,6 @@ import { ids } from 
'../../src/lexicon/lexicons' import { forSnapshot } from '../_util' import { AppContext, Database } from '../../src' import basicSeed from '../seeds/basic' -import { SeedClient } from '../seeds/client' describe('sync', () => { let network: TestNetwork @@ -25,7 +23,7 @@ describe('sync', () => { }) ctx = network.bsky.ctx pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) }) diff --git a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap index 59123e54b20..a2549b0a52c 100644 --- a/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/author-feed.test.ts.snap @@ -77,7 +77,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -110,7 +110,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -260,7 +260,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -271,7 +271,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -486,7 +486,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", 
+ "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(1)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(1)/cids(2)@jpeg", }, @@ -519,7 +519,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -745,12 +745,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -800,7 +800,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -811,7 +811,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1036,12 +1036,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": 
"tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -1091,7 +1091,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1102,7 +1102,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1224,7 +1224,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -1257,7 +1257,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1431,12 +1431,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(8)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(8)@jpeg", }, @@ -1486,7 +1486,7 @@ 
Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1497,7 +1497,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1662,7 +1662,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -1695,7 +1695,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1848,7 +1848,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1859,7 +1859,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap index 5ee901c65d8..ba5c00182de 100644 --- a/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/blocks.test.ts.snap @@ -295,7 +295,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": 
"tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(4)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(4)@jpeg", }, @@ -328,7 +328,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap index 34d5712d303..d6712c89c56 100644 --- a/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/list-feed.test.ts.snap @@ -78,7 +78,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -111,7 +111,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -209,7 +209,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -242,7 +242,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", 
"image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -444,7 +444,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -455,7 +455,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap index 2585a96ec42..2824414f97b 100644 --- a/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mute-lists.test.ts.snap @@ -228,7 +228,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(6)@jpeg", }, @@ -261,7 +261,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap index fb0eb1fc5d1..ca8b664ec91 100644 --- a/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/mutes.test.ts.snap @@ -205,7 +205,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": 
"https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -238,7 +238,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap index 5fddc479c76..bce3d4e5139 100644 --- a/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/notifications.test.ts.snap @@ -272,7 +272,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -716,7 +716,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/posts.test.ts.snap b/packages/bsky/tests/views/__snapshots__/posts.test.ts.snap index df8a4cdf826..3b14a184dc1 100644 --- a/packages/bsky/tests/views/__snapshots__/posts.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/posts.test.ts.snap @@ -156,12 +156,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg", }, Object { - "alt": 
"tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(7)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(7)@jpeg", }, @@ -212,7 +212,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -223,7 +223,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -286,12 +286,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(6)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(7)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(7)@jpeg", }, @@ -342,7 +342,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -353,7 +353,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap index 4cdd3555805..6bc84753951 100644 --- 
a/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/thread.test.ts.snap @@ -73,7 +73,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -106,7 +106,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -310,7 +310,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -343,7 +343,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -551,7 +551,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -584,7 +584,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": 
"blob", "mimeType": "image/jpeg", @@ -1116,7 +1116,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(4)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(4)@jpeg", }, @@ -1149,7 +1149,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1319,7 +1319,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(4)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(4)@jpeg", }, @@ -1352,7 +1352,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap index fe9b243c10a..b5863382fef 100644 --- a/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap +++ b/packages/bsky/tests/views/__snapshots__/timeline.test.ts.snap @@ -999,12 +999,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(11)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(11)@jpeg", 
}, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(12)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(12)@jpeg", }, @@ -1073,7 +1073,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1084,7 +1084,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1312,7 +1312,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -1345,7 +1345,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1515,12 +1515,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(8)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(8)@jpeg", 
}, @@ -1589,7 +1589,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1600,7 +1600,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1748,7 +1748,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -1781,7 +1781,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2004,7 +2004,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -2037,7 +2037,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2241,7 +2241,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ 
-2252,7 +2252,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2448,12 +2448,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(8)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(8)@jpeg", }, @@ -2522,7 +2522,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2533,7 +2533,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2638,12 +2638,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(8)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(8)@jpeg", }, @@ -2712,7 +2712,7 @@ Array [ "$type": "app.bsky.embed.images", "images": 
Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2723,7 +2723,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2897,12 +2897,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -2969,7 +2969,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -2980,7 +2980,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3127,7 +3127,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(2)@jpeg", }, @@ -3160,7 +3160,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + 
"alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3392,7 +3392,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(2)@jpeg", }, @@ -3425,7 +3425,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3638,7 +3638,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3649,7 +3649,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3832,12 +3832,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -3904,7 +3904,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + 
"alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -3915,7 +3915,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4087,12 +4087,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -4160,7 +4160,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4171,7 +4171,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4320,7 +4320,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(2)@jpeg", }, @@ -4353,7 +4353,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { 
"$type": "blob", "mimeType": "image/jpeg", @@ -4649,7 +4649,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4660,7 +4660,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4814,12 +4814,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(2)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(2)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(3)@jpeg", }, @@ -4887,7 +4887,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -4898,7 +4898,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -5084,7 +5084,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -5117,7 +5117,7 @@ Array [ 
"$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -5277,7 +5277,7 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(5)@jpeg", }, @@ -5310,7 +5310,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -5500,12 +5500,12 @@ Array [ "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(9)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(9)@jpeg", }, @@ -5573,7 +5573,7 @@ Array [ "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -5584,7 +5584,7 @@ Array [ }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git 
a/packages/bsky/tests/views/actor-likes.test.ts b/packages/bsky/tests/views/actor-likes.test.ts index cf0281fdde3..642b37e6446 100644 --- a/packages/bsky/tests/views/actor-likes.test.ts +++ b/packages/bsky/tests/views/actor-likes.test.ts @@ -1,6 +1,5 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' describe('bsky actor likes feed views', () => { @@ -20,7 +19,7 @@ describe('bsky actor likes feed views', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/views/actor-search.test.ts b/packages/bsky/tests/views/actor-search.test.ts index 77f657a9bf6..5562f747700 100644 --- a/packages/bsky/tests/views/actor-search.test.ts +++ b/packages/bsky/tests/views/actor-search.test.ts @@ -1,9 +1,8 @@ import AtpAgent from '@atproto/api' import { wait } from '@atproto/common' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { forSnapshot, paginateAll, stripViewer } from '../_util' -import { SeedClient } from '../seeds/client' import usersBulkSeed from '../seeds/users-bulk' describe('pds actor search views', () => { @@ -17,8 +16,7 @@ describe('pds actor search views', () => { dbPostgresSchema: 'bsky_views_actor_search', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await wait(50) // allow pending sub to be established await network.bsky.ingester.sub.destroy() diff --git a/packages/bsky/tests/views/admin/repo-search.test.ts 
b/packages/bsky/tests/views/admin/repo-search.test.ts index ec53418eb46..6d9e8468dc1 100644 --- a/packages/bsky/tests/views/admin/repo-search.test.ts +++ b/packages/bsky/tests/views/admin/repo-search.test.ts @@ -1,7 +1,6 @@ import AtpAgent, { ComAtprotoAdminSearchRepos } from '@atproto/api' import { wait } from '@atproto/common' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import usersBulkSeed from '../../seeds/users-bulk' describe('pds admin repo search views', () => { @@ -29,8 +28,7 @@ describe('pds admin repo search views', () => { dbPostgresSchema: 'bsky_views_repo_search', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await wait(100) // allow pending sub to be established await network.bsky.ingester.sub.destroy() diff --git a/packages/bsky/tests/views/author-feed.test.ts b/packages/bsky/tests/views/author-feed.test.ts index 62e0fd0826e..3d764335282 100644 --- a/packages/bsky/tests/views/author-feed.test.ts +++ b/packages/bsky/tests/views/author-feed.test.ts @@ -1,7 +1,6 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, paginateAll, stripViewerFromPost } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { isRecord } from '../../src/lexicon/types/app/bsky/feed/post' @@ -24,8 +23,7 @@ describe('pds author feed views', () => { dbPostgresSchema: 'bsky_views_author_feed', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git 
a/packages/bsky/tests/views/block-lists.test.ts b/packages/bsky/tests/views/block-lists.test.ts index 0a8a223e046..d2ef0387777 100644 --- a/packages/bsky/tests/views/block-lists.test.ts +++ b/packages/bsky/tests/views/block-lists.test.ts @@ -1,9 +1,7 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' import { forSnapshot } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' -import { RecordRef } from '@atproto/bsky/tests/seeds/client' import { BlockedActorError } from '@atproto/api/src/client/types/app/bsky/feed/getAuthorFeed' import { BlockedByActorError } from '@atproto/api/src/client/types/app/bsky/feed/getAuthorFeed' @@ -25,7 +23,7 @@ describe('pds views with blocking from block lists', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob @@ -52,7 +50,7 @@ describe('pds views with blocking from block lists', () => { it('creates a list with some items', async () => { const avatar = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) // alice creates block list with bob & carol that dan uses diff --git a/packages/bsky/tests/views/blocks.test.ts b/packages/bsky/tests/views/blocks.test.ts index 0109b93f82a..312e997cb36 100644 --- a/packages/bsky/tests/views/blocks.test.ts +++ b/packages/bsky/tests/views/blocks.test.ts @@ -1,4 +1,5 @@ import assert from 'assert' +import { TestNetwork, RecordRef, SeedClient } from '@atproto/dev-env' import AtpAgent, { AtUri } from '@atproto/api' import { BlockedActorError } from '@atproto/api/src/client/types/app/bsky/feed/getAuthorFeed' import { BlockedByActorError } from '@atproto/api/src/client/types/app/bsky/feed/getAuthorFeed' @@ -7,9 +8,7 @@ 
import { isViewRecord as isEmbedViewRecord, isViewBlocked as isEmbedViewBlocked, } from '@atproto/api/src/client/types/app/bsky/embed/record' -import { TestNetwork } from '@atproto/dev-env' import { forSnapshot } from '../_util' -import { RecordRef, SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('pds views with blocking', () => { @@ -32,7 +31,7 @@ describe('pds views with blocking', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice carol = sc.dids.carol diff --git a/packages/bsky/tests/views/follows.test.ts b/packages/bsky/tests/views/follows.test.ts index e048b433b8e..3bf89ff965e 100644 --- a/packages/bsky/tests/views/follows.test.ts +++ b/packages/bsky/tests/views/follows.test.ts @@ -1,7 +1,6 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, paginateAll, stripViewer } from '../_util' -import { SeedClient } from '../seeds/client' import followsSeed from '../seeds/follows' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' @@ -18,8 +17,7 @@ describe('pds follow views', () => { dbPostgresSchema: 'bsky_views_follows', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await followsSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/views/likes.test.ts b/packages/bsky/tests/views/likes.test.ts index baa2d161e11..f8f9c9a7fef 100644 --- a/packages/bsky/tests/views/likes.test.ts +++ b/packages/bsky/tests/views/likes.test.ts @@ -1,13 +1,11 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from 
'@atproto/dev-env' import likesSeed from '../seeds/likes' import { constantDate, forSnapshot, paginateAll, stripViewer } from '../_util' describe('pds like views', () => { let network: TestNetwork let agent: AtpAgent - let pdsAgent: AtpAgent let sc: SeedClient // account dids, for convenience @@ -19,8 +17,7 @@ describe('pds like views', () => { dbPostgresSchema: 'bsky_views_likes', }) agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await likesSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/views/list-feed.test.ts b/packages/bsky/tests/views/list-feed.test.ts index c9d94f1a9d4..baef857f437 100644 --- a/packages/bsky/tests/views/list-feed.test.ts +++ b/packages/bsky/tests/views/list-feed.test.ts @@ -1,7 +1,6 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' import { forSnapshot, paginateAll, stripViewerFromPost } from '../_util' -import { RecordRef, SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' @@ -22,8 +21,7 @@ describe('list feed views', () => { dbPostgresSchema: 'bsky_views_list_feed', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob diff --git a/packages/bsky/tests/views/mute-lists.test.ts b/packages/bsky/tests/views/mute-lists.test.ts index a1800ad1143..de2a047b654 100644 --- a/packages/bsky/tests/views/mute-lists.test.ts +++ b/packages/bsky/tests/views/mute-lists.test.ts @@ -1,7 +1,6 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' import { forSnapshot } 
from '../_util' -import { RecordRef, SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('bsky views with mutes from mute lists', () => { @@ -21,7 +20,7 @@ describe('bsky views with mutes from mute lists', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob @@ -43,7 +42,7 @@ describe('bsky views with mutes from mute lists', () => { it('creates a list with some items', async () => { const avatar = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) // alice creates mute list with bob & carol that dan uses diff --git a/packages/bsky/tests/views/mutes.test.ts b/packages/bsky/tests/views/mutes.test.ts index 15be18a7b27..6a00c427124 100644 --- a/packages/bsky/tests/views/mutes.test.ts +++ b/packages/bsky/tests/views/mutes.test.ts @@ -1,14 +1,12 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, paginateAll } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import usersBulkSeed from '../seeds/users-bulk' describe('mute views', () => { let network: TestNetwork let agent: AtpAgent - let pdsAgent: AtpAgent let sc: SeedClient let alice: string let bob: string @@ -22,8 +20,7 @@ describe('mute views', () => { dbPostgresSchema: 'bsky_views_mutes', }) agent = network.bsky.getClient() - pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await usersBulkSeed(sc, 10) alice = sc.dids.alice diff --git a/packages/bsky/tests/views/notifications.test.ts b/packages/bsky/tests/views/notifications.test.ts index b125ffc3570..7bdd5d5f933 100644 --- a/packages/bsky/tests/views/notifications.test.ts 
+++ b/packages/bsky/tests/views/notifications.test.ts @@ -1,8 +1,7 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { forSnapshot, paginateAll } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { Notification } from '../../src/lexicon/types/app/bsky/notification/listNotifications' @@ -19,8 +18,7 @@ describe('notification views', () => { dbPostgresSchema: 'bsky_views_notifications', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/views/posts.test.ts b/packages/bsky/tests/views/posts.test.ts index 6fa12a085df..a2710a02cf7 100644 --- a/packages/bsky/tests/views/posts.test.ts +++ b/packages/bsky/tests/views/posts.test.ts @@ -1,7 +1,6 @@ import AtpAgent, { AppBskyFeedPost } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, stripViewerFromPost } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('pds posts views', () => { @@ -16,7 +15,7 @@ describe('pds posts views', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/views/profile.test.ts b/packages/bsky/tests/views/profile.test.ts index a1224283794..fd3bde6d0ef 100644 --- a/packages/bsky/tests/views/profile.test.ts +++ b/packages/bsky/tests/views/profile.test.ts @@ -1,10 +1,9 @@ import fs from 'fs/promises' import AtpAgent 
from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { forSnapshot, stripViewer } from '../_util' import { ids } from '../../src/lexicon/lexicons' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('pds profile views', () => { @@ -24,7 +23,7 @@ describe('pds profile views', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() @@ -109,10 +108,10 @@ describe('pds profile views', () => { it('presents avatars & banners', async () => { const avatarImg = await fs.readFile( - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', ) const bannerImg = await fs.readFile( - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', ) const avatarRes = await pdsAgent.api.com.atproto.repo.uploadBlob( avatarImg, diff --git a/packages/bsky/tests/views/reposts.test.ts b/packages/bsky/tests/views/reposts.test.ts index e401cc27a09..4d386121137 100644 --- a/packages/bsky/tests/views/reposts.test.ts +++ b/packages/bsky/tests/views/reposts.test.ts @@ -1,7 +1,6 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { forSnapshot, paginateAll, stripViewer } from '../_util' -import { SeedClient } from '../seeds/client' import repostsSeed from '../seeds/reposts' describe('pds repost views', () => { @@ -18,8 +17,7 @@ describe('pds repost views', () => { dbPostgresSchema: 'bsky_views_reposts', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await repostsSeed(sc) 
await network.processAll() alice = sc.dids.alice diff --git a/packages/bsky/tests/views/suggested-follows.test.ts b/packages/bsky/tests/views/suggested-follows.test.ts index 1d8cb5a91ba..e9aa3248df7 100644 --- a/packages/bsky/tests/views/suggested-follows.test.ts +++ b/packages/bsky/tests/views/suggested-follows.test.ts @@ -1,6 +1,5 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import likesSeed from '../seeds/likes' describe('suggested follows', () => { @@ -15,7 +14,7 @@ describe('suggested follows', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await likesSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/views/suggestions.test.ts b/packages/bsky/tests/views/suggestions.test.ts index e69bd5e377e..2dcadf9e6ad 100644 --- a/packages/bsky/tests/views/suggestions.test.ts +++ b/packages/bsky/tests/views/suggestions.test.ts @@ -1,7 +1,6 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { stripViewer } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('pds user search views', () => { @@ -14,8 +13,7 @@ describe('pds user search views', () => { dbPostgresSchema: 'bsky_views_suggestions', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() await network.bsky.processAll() diff --git a/packages/bsky/tests/views/thread.test.ts b/packages/bsky/tests/views/thread.test.ts index d1c96f38603..bee609f197b 100644 --- a/packages/bsky/tests/views/thread.test.ts +++ 
b/packages/bsky/tests/views/thread.test.ts @@ -1,8 +1,7 @@ import AtpAgent, { AppBskyFeedGetPostThread } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { forSnapshot, stripViewerFromThread } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import assert from 'assert' import { isThreadViewPost } from '@atproto/api/src/client/types/app/bsky/feed/defs' @@ -22,8 +21,7 @@ describe('pds thread views', () => { dbPostgresSchema: 'bsky_views_thread', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob diff --git a/packages/bsky/tests/views/threadgating.test.ts b/packages/bsky/tests/views/threadgating.test.ts index 7d29addfcf5..8cfaedba44e 100644 --- a/packages/bsky/tests/views/threadgating.test.ts +++ b/packages/bsky/tests/views/threadgating.test.ts @@ -1,11 +1,10 @@ import assert from 'assert' import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { isNotFoundPost, isThreadViewPost, } from '../../src/lexicon/types/app/bsky/feed/defs' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { forSnapshot } from '../_util' @@ -21,7 +20,7 @@ describe('views with thread gating', () => { }) agent = network.bsky.getClient() pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() }) diff --git a/packages/bsky/tests/views/timeline.test.ts b/packages/bsky/tests/views/timeline.test.ts index e7db746c7f3..9cd3f688e33 100644 --- a/packages/bsky/tests/views/timeline.test.ts +++ 
b/packages/bsky/tests/views/timeline.test.ts @@ -1,9 +1,8 @@ import assert from 'assert' import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { forSnapshot, getOriginator, paginateAll } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' import { FeedAlgorithm } from '../../src/api/app/bsky/util/feed' import { FeedViewPost } from '../../src/lexicon/types/app/bsky/feed/defs' @@ -24,8 +23,7 @@ describe('timeline views', () => { dbPostgresSchema: 'bsky_views_home_feed', }) agent = network.bsky.getClient() - const pdsAgent = network.pds.getClient() - sc = new SeedClient(pdsAgent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() alice = sc.dids.alice diff --git a/packages/dev-env/package.json b/packages/dev-env/package.json index 8af9282bbb2..28a13b69b1d 100644 --- a/packages/dev-env/package.json +++ b/packages/dev-env/package.json @@ -30,6 +30,7 @@ "@atproto/common-web": "workspace:^", "@atproto/crypto": "workspace:^", "@atproto/identity": "workspace:^", + "@atproto/lexicon": "workspace:^", "@atproto/pds": "workspace:^", "@atproto/syntax": "workspace:^", "@atproto/xrpc-server": "workspace:^", @@ -37,9 +38,10 @@ "@did-plc/server": "^0.0.1", "better-sqlite3": "^7.6.2", "chalk": "^5.0.1", - "dotenv": "^16.0.1", + "dotenv": "^16.0.3", "express": "^4.18.2", "get-port": "^6.1.2", + "multiformats": "^9.9.0", "sharp": "^0.31.2", "uint8arrays": "3.0.0" }, diff --git a/packages/dev-env/src/bin.ts b/packages/dev-env/src/bin.ts index ae89c9abecb..c03f8a76900 100644 --- a/packages/dev-env/src/bin.ts +++ b/packages/dev-env/src/bin.ts @@ -1,3 +1,4 @@ +import './env' import { generateMockSetup } from './mock' import { TestNetwork } from './network' import { mockMailer } from './util' @@ -16,7 +17,7 @@ const run = async () => { const 
network = await TestNetwork.create({ pds: { port: 2583, - publicUrl: 'http://localhost:2583', + hostname: 'localhost', dbPostgresSchema: 'pds', }, bsky: { diff --git a/packages/bsky/src/env.ts b/packages/dev-env/src/env.ts similarity index 100% rename from packages/bsky/src/env.ts rename to packages/dev-env/src/env.ts diff --git a/packages/dev-env/src/index.ts b/packages/dev-env/src/index.ts index 18406c2cbba..160247f9fbb 100644 --- a/packages/dev-env/src/index.ts +++ b/packages/dev-env/src/index.ts @@ -3,5 +3,7 @@ export * from './network' export * from './network-no-appview' export * from './pds' export * from './plc' +export * from './feed-gen' +export * from './seed-client' export * from './types' export * from './util' diff --git a/packages/dev-env/src/network-no-appview.ts b/packages/dev-env/src/network-no-appview.ts index 24a0b72fb59..25054b2ab4e 100644 --- a/packages/dev-env/src/network-no-appview.ts +++ b/packages/dev-env/src/network-no-appview.ts @@ -4,6 +4,7 @@ import { TestPlc } from './plc' import { TestPds } from './pds' import { mockNetworkUtilities } from './util' import { TestFeedGen } from './feed-gen' +import { SeedClient } from './seed-client' export class TestNetworkNoAppView { feedGens: TestFeedGen[] = [] @@ -15,12 +16,15 @@ export class TestNetworkNoAppView { const dbPostgresUrl = params.dbPostgresUrl || process.env.DB_POSTGRES_URL const dbPostgresSchema = params.dbPostgresSchema || process.env.DB_POSTGRES_SCHEMA + const dbSqliteLocation = + dbPostgresUrl === undefined ? ':memory:' : undefined const plc = await TestPlc.create(params.plc ?? 
{}) const pds = await TestPds.create({ dbPostgresUrl, dbPostgresSchema, - plcUrl: plc.url, + dbSqliteLocation, + didPlcUrl: plc.url, ...params.pds, }) @@ -37,6 +41,11 @@ export class TestNetworkNoAppView { return fg } + getSeedClient(): SeedClient { + const agent = this.pds.getClient() + return new SeedClient(this, agent) + } + async processAll() { await this.pds.processAll() } diff --git a/packages/dev-env/src/network.ts b/packages/dev-env/src/network.ts index 7b332c1d81c..a6c150f0353 100644 --- a/packages/dev-env/src/network.ts +++ b/packages/dev-env/src/network.ts @@ -46,8 +46,9 @@ export class TestNetwork extends TestNetworkNoAppView { port: pdsPort, dbPostgresUrl, dbPostgresSchema, - plcUrl: plc.url, - bskyAppViewEndpoint: bsky.url, + dbPostgresPoolSize: 5, + didPlcUrl: plc.url, + bskyAppViewUrl: bsky.url, bskyAppViewDid: bsky.ctx.cfg.serverDid, bskyAppViewModeration: true, ...params.pds, diff --git a/packages/dev-env/src/pds.ts b/packages/dev-env/src/pds.ts index 2cdcadfe70c..501ae390cdb 100644 --- a/packages/dev-env/src/pds.ts +++ b/packages/dev-env/src/pds.ts @@ -1,13 +1,17 @@ +import path from 'node:path' +import os from 'node:os' import getPort from 'get-port' import * as ui8 from 'uint8arrays' import * as pds from '@atproto/pds' import { Secp256k1Keypair, randomStr } from '@atproto/crypto' import { AtpAgent } from '@atproto/api' -import { Client as PlcClient } from '@did-plc/lib' -import { DAY, HOUR } from '@atproto/common-web' import { PdsConfig } from './types' import { uniqueLockId } from './util' +const ADMIN_PASSWORD = 'admin-pass' +const MOD_PASSWORD = 'mod-pass' +const TRIAGE_PASSWORD = 'triage-pass' + export class TestPds { constructor( public url: string, @@ -15,74 +19,54 @@ export class TestPds { public server: pds.PDS, ) {} - static async create(cfg: PdsConfig): Promise { - const repoSigningKey = await Secp256k1Keypair.create() - const plcRotationKey = await Secp256k1Keypair.create() - const recoveryKey = await Secp256k1Keypair.create() + 
static async create(config: PdsConfig): Promise { + const repoSigningKey = await Secp256k1Keypair.create({ exportable: true }) + const repoSigningPriv = ui8.toString(await repoSigningKey.export(), 'hex') + const plcRotationKey = await Secp256k1Keypair.create({ exportable: true }) + const plcRotationPriv = ui8.toString(await plcRotationKey.export(), 'hex') + const recoveryKey = (await Secp256k1Keypair.create()).did() - const port = cfg.port || (await getPort()) + const port = config.port || (await getPort()) const url = `http://localhost:${port}` - const plcClient = new PlcClient(cfg.plcUrl) - - const serverDid = await plcClient.createDid({ - signingKey: repoSigningKey.did(), - rotationKeys: [recoveryKey.did(), plcRotationKey.did()], - handle: 'pds.test', - pds: `http://localhost:${port}`, - signer: plcRotationKey, - }) - - const config = new pds.ServerConfig({ - debugMode: true, - version: '0.0.0', - scheme: 'http', + + const blobstoreLoc = path.join(os.tmpdir(), randomStr(8, 'base32')) + + const env: pds.ServerEnvironment = { port, - hostname: 'localhost', - serverDid, - recoveryKey: recoveryKey.did(), - adminPassword: 'admin-pass', - moderatorPassword: 'moderator-pass', - triagePassword: 'triage-pass', - inviteRequired: false, - userInviteInterval: null, - userInviteEpoch: 0, - didPlcUrl: cfg.plcUrl, - didCacheMaxTTL: DAY, - didCacheStaleTTL: HOUR, + blobstoreDiskLocation: blobstoreLoc, + recoveryDidKey: recoveryKey, + adminPassword: ADMIN_PASSWORD, + moderatorPassword: MOD_PASSWORD, + triagePassword: TRIAGE_PASSWORD, jwtSecret: 'jwt-secret', - availableUserDomains: ['.test'], - rateLimitsEnabled: false, - appUrlPasswordReset: 'app://forgot-password', - emailNoReplyAddress: 'noreply@blueskyweb.xyz', - publicUrl: 'https://pds.public.url', - dbPostgresUrl: cfg.dbPostgresUrl, - maxSubscriptionBuffer: 200, - repoBackfillLimitMs: 1000 * 60 * 60, // 1hr + serviceHandleDomains: ['.test'], sequencerLeaderLockId: uniqueLockId(), - dbTxLockNonce: await randomStr(32, 
'base32'), - bskyAppViewEndpoint: cfg.bskyAppViewEndpoint ?? 'http://fake_address', - bskyAppViewDid: cfg.bskyAppViewDid ?? 'did:example:fake', + bskyAppViewUrl: 'https://appview.invalid', + bskyAppViewDid: 'did:example:invalid', bskyAppViewCdnUrlPattern: 'http://cdn.appview.com/%s/%s/%s', - ...cfg, - }) - - const blobstore = new pds.MemoryBlobStore() - const db = config.dbPostgresUrl - ? pds.Database.postgres({ - url: config.dbPostgresUrl, - schema: config.dbPostgresSchema, - txLockNonce: config.dbTxLockNonce, - }) - : pds.Database.memory() - await db.migrateToLatestOrThrow() - - const server = pds.PDS.create({ - db, - blobstore, - repoSigningKey, - plcRotationKey, - config, - }) + repoSigningKeyK256PrivateKeyHex: repoSigningPriv, + plcRotationKeyK256PrivateKeyHex: plcRotationPriv, + inviteRequired: false, + ...config, + } + const cfg = pds.envToCfg(env) + const secrets = pds.envToSecrets(env) + + const server = await pds.PDS.create(cfg, secrets) + + // Separate migration db on postgres in case migration changes some + // connection state that we need in the tests, e.g. "alter database ... set ..." + const migrationDb = + cfg.db.dialect === 'pg' + ? pds.Database.postgres({ + url: cfg.db.url, + schema: cfg.db.schema, + }) + : server.ctx.db + await migrationDb.migrateToLatestOrThrow() + if (migrationDb !== server.ctx.db) { + await migrationDb.close() + } await server.start() @@ -100,10 +84,10 @@ export class TestPds { adminAuth(role: 'admin' | 'moderator' | 'triage' = 'admin'): string { const password = role === 'triage' - ? this.ctx.cfg.triagePassword + ? TRIAGE_PASSWORD : role === 'moderator' - ? this.ctx.cfg.moderatorPassword - : this.ctx.cfg.adminPassword + ? 
MOD_PASSWORD + : ADMIN_PASSWORD return ( 'Basic ' + ui8.toString(ui8.fromString(`admin:${password}`, 'utf8'), 'base64pad') diff --git a/packages/pds/tests/seeds/client.ts b/packages/dev-env/src/seed-client.ts similarity index 96% rename from packages/pds/tests/seeds/client.ts rename to packages/dev-env/src/seed-client.ts index c52f499f6ab..b9b1eded96a 100644 --- a/packages/pds/tests/seeds/client.ts +++ b/packages/dev-env/src/seed-client.ts @@ -9,8 +9,7 @@ import { Record as LikeRecord } from '@atproto/api/src/client/types/app/bsky/fee import { Record as FollowRecord } from '@atproto/api/src/client/types/app/bsky/graph/follow' import { AtUri } from '@atproto/syntax' import { BlobRef } from '@atproto/lexicon' -import { adminAuth } from '../_util' -import { ids } from '../../src/lexicon/lexicons' +import { TestNetworkNoAppView } from './network-no-appview' // Makes it simple to create data via the XRPC client, // and keeps track of all created data in memory for convenience. @@ -83,7 +82,7 @@ export class SeedClient { > dids: Record - constructor(public agent: AtpAgent) { + constructor(public network: TestNetworkNoAppView, public agent: AtpAgent) { this.accounts = {} this.profiles = {} this.follows = {} @@ -129,9 +128,7 @@ export class SeedClient { description: string, selfLabels?: string[], ) { - AVATAR_IMG ??= await fs.readFile( - 'tests/image/fixtures/key-portrait-small.jpg', - ) + AVATAR_IMG ??= await fs.readFile('tests/sample-img/key-portrait-small.jpg') let avatarBlob { @@ -172,7 +169,7 @@ export class SeedClient { const res = await this.agent.api.com.atproto.repo.putRecord( { repo: by, - collection: ids.AppBskyActorProfile, + collection: 'app.bsky.actor.profile', rkey: 'self', record, }, @@ -438,7 +435,7 @@ export class SeedClient { { action, subject, createdBy, reason }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: this.adminAuthHeaders(), }, ) return result.data @@ -455,7 +452,7 @@ export class SeedClient { { id, 
reason, createdBy }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: this.adminAuthHeaders(), }, ) return result.data @@ -472,7 +469,7 @@ export class SeedClient { { actionId, createdBy, reportIds }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: this.adminAuthHeaders(), }, ) return result.data @@ -495,6 +492,10 @@ export class SeedClient { return result.data } + adminAuthHeaders() { + return this.network.pds.adminAuthHeaders() + } + getHeaders(did: string) { return SeedClient.getHeaders(this.accounts[did].accessJwt) } diff --git a/packages/dev-env/src/types.ts b/packages/dev-env/src/types.ts index a1642f4751c..3bbcaf15257 100644 --- a/packages/dev-env/src/types.ts +++ b/packages/dev-env/src/types.ts @@ -7,8 +7,8 @@ export type PlcConfig = { version?: string } -export type PdsConfig = Partial & { - plcUrl: string +export type PdsConfig = Partial & { + didPlcUrl: string migration?: string } @@ -29,6 +29,6 @@ export type TestServerParams = { dbPostgresUrl: string dbPostgresSchema: string pds: Partial - plc: Partial + plc: Partial bsky: Partial } diff --git a/packages/dev-env/src/util.ts b/packages/dev-env/src/util.ts index 0e62a733a8a..7e3f275ca98 100644 --- a/packages/dev-env/src/util.ts +++ b/packages/dev-env/src/util.ts @@ -21,18 +21,21 @@ export const mockResolvers = (idResolver: IdResolver, pds: TestPds) => { const service = result?.service?.find((svc) => svc.id === '#atproto_pds') if (typeof service?.serviceEndpoint === 'string') { service.serviceEndpoint = service.serviceEndpoint.replace( - pds.ctx.cfg.publicUrl, + pds.ctx.cfg.service.publicUrl, `http://localhost:${pds.port}`, ) } return result } + const origResolveHandleDns = idResolver.handle.resolveDns idResolver.handle.resolve = async (handle: string) => { - const isPdsHandle = pds.ctx.cfg.availableUserDomains.some((domain) => - handle.endsWith(domain), + const isPdsHandle = pds.ctx.cfg.identity.serviceHandleDomains.some( + 
(domain) => handle.endsWith(domain), ) - if (!isPdsHandle) return undefined + if (!isPdsHandle) { + return origResolveHandleDns.call(idResolver.handle, handle) + } const url = `${pds.url}/.well-known/atproto-did` try { diff --git a/packages/pds/bench/sequencer.bench.ts b/packages/pds/bench/sequencer.bench.ts index 00c3e2c21c4..b7b054e9d8a 100644 --- a/packages/pds/bench/sequencer.bench.ts +++ b/packages/pds/bench/sequencer.bench.ts @@ -1,37 +1,38 @@ import { randomBytes } from '@atproto/crypto' import { cborEncode } from '@atproto/common' -import { TestServerInfo, runTestServer } from '../tests/_util' import { randomCid } from '@atproto/repo/tests/_util' import { BlockMap, blocksToCarFile } from '@atproto/repo' import { byFrame } from '@atproto/xrpc-server' import { WebSocket } from 'ws' import { Database } from '../src' +import { TestNetworkNoAppView } from '@atproto/dev-env' describe('sequencer bench', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let db: Database beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'sequencer_bench', - maxSubscriptionBuffer: 20000, + pds: { + maxSubscriptionBuffer: 20000, + }, }) - if (!server.ctx.cfg.dbPostgresUrl) { + if (network.pds.ctx.cfg.db.dialect !== 'pg') { throw new Error('no postgres url') } db = Database.postgres({ - url: server.ctx.cfg.dbPostgresUrl, - schema: server.ctx.cfg.dbPostgresSchema, - txLockNonce: server.ctx.cfg.dbTxLockNonce, + url: network.pds.ctx.cfg.db.url, + schema: network.pds.ctx.cfg.db.schema, poolSize: 50, }) - server.ctx.sequencerLeader?.destroy() + network.pds.ctx.sequencerLeader?.destroy() }) afterAll(async () => { - await server.close() + await network.close() }) const doWrites = async (batches: number, batchSize: number) => { @@ -78,7 +79,7 @@ describe('sequencer bench', () => { totalToRead: number, cursor?: number, ): Promise => { - const serverHost = server.url.replace('http://', '') + const 
serverHost = network.pds.url.replace('http://', '') let url = `ws://${serverHost}/xrpc/com.atproto.sync.subscribeRepos` if (cursor !== undefined) { url += `?cursor=${cursor}` @@ -114,7 +115,7 @@ describe('sequencer bench', () => { await doWrites(BATCHES, BATCH_SIZE) const setup = Date.now() - await server.ctx.sequencerLeader?.sequenceOutgoing() + await network.pds.ctx.sequencerLeader?.sequenceOutgoing() const sequencingTime = Date.now() - setup const liveTailTime = await readAllPromise diff --git a/packages/pds/build.js b/packages/pds/build.js index 82b7c051236..8686a6e4bf9 100644 --- a/packages/pds/build.js +++ b/packages/pds/build.js @@ -6,7 +6,7 @@ const buildShallow = require('esbuild').build({ logLevel: 'info', - entryPoints: ['src/index.ts', 'src/bin.ts', 'src/db/index.ts'], + entryPoints: ['src/index.ts', 'src/db/index.ts'], bundle: true, sourcemap: true, outdir: 'dist', diff --git a/packages/pds/example.dev.env b/packages/pds/example.dev.env deleted file mode 100644 index 561947cf22d..00000000000 --- a/packages/pds/example.dev.env +++ /dev/null @@ -1,6 +0,0 @@ -DB_POSTGRES_URL="postgres://bsky:yksb@localhost/pds_dev" -DEBUG_MODE=1 -LOG_ENABLED="true" -LOG_LEVEL=debug -LOG_DESTINATION=1 -AVAILABLE_USER_DOMAINS=".test,.dev.bsky.dev" diff --git a/packages/pds/example.env b/packages/pds/example.env new file mode 100644 index 00000000000..fc3c3520eb0 --- /dev/null +++ b/packages/pds/example.env @@ -0,0 +1,24 @@ +# See more env options in src/config/env.ts +# Hostname - the public domain that you intend to deploy your service at +PDS_HOSTNAME="example.com" + +# Database config - use one or the other +PDS_DB_SQLITE_LOCATION="db.test" +# PDS_DB_POSTGRES_URL="postgresql://pg:password@localhost:5433/postgres" + +# Blobstore - filesystem location to store uploaded blobs +PDS_BLOBSTORE_DISK_LOCATION="blobs" + +# Private keys - these are each expected to be 64 char hex strings (256 bit) +PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX="3ee68..." 
+PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX="e049f..." + +# Secrets - update to secure high-entropy strings +PDS_JWT_SECRET="jwt-secret" +PDS_ADMIN_PASSWORD="admin-pass" + +# Environment - example is for sandbox +PDS_DID_PLC_URL="https://plc.bsky-sandbox.dev" +PDS_BSKY_APP_VIEW_ENDPOINT="https://api.bsky-sandbox.dev" +PDS_BSKY_APP_VIEW_DID="did:web:api.bsky-sandbox.dev" +PDS_CRAWLERS="https://bgs.bsky-sandbox.dev" \ No newline at end of file diff --git a/packages/pds/package.json b/packages/pds/package.json index b6cacf93672..4c22a61133b 100644 --- a/packages/pds/package.json +++ b/packages/pds/package.json @@ -1,6 +1,6 @@ { "name": "@atproto/pds", - "version": "0.1.20", + "version": "0.3.0-beta.3", "license": "MIT", "description": "Reference implementation of atproto Personal Data Server (PDS)", "keywords": [ @@ -23,9 +23,8 @@ "codegen": "lex gen-server ./src/lexicon ../../lexicons/com/atproto/*/* ../../lexicons/app/bsky/*/*", "build": "node ./build.js", "postbuild": "tsc --build tsconfig.build.json", - "update-main-to-dist": "node ../../update-main-to-dist.js packages/pds", - "start": "node dist/bin.js", "test": "../dev-infra/with-test-redis-and-db.sh jest", + "update-main-to-dist": "node ../../update-main-to-dist.js packages/pds", "bench": "../dev-infra/with-test-redis-and-db.sh jest --config jest.bench.config.js", "test:sqlite": "jest --testPathIgnorePatterns /tests/proxied/*", "test:log": "tail -50 test.log | pino-pretty", @@ -34,6 +33,7 @@ }, "dependencies": { "@atproto/api": "workspace:^", + "@atproto/aws": "workspace:^", "@atproto/common": "workspace:^", "@atproto/crypto": "workspace:^", "@atproto/identity": "workspace:^", @@ -47,7 +47,7 @@ "bytes": "^3.1.2", "compression": "^1.7.4", "cors": "^2.8.5", - "dotenv": "^16.0.0", + "disposable-email": "^0.2.3", "express": "^4.17.2", "express-async-errors": "^3.1.1", "file-type": "^16.5.4", @@ -77,6 +77,7 @@ "@atproto/lex-cli": "workspace:^", "@did-plc/server": "^0.0.1", "@types/cors": "^2.8.12", + 
"@types/disposable-email": "^0.2.0", "@types/express": "^4.17.13", "@types/express-serve-static-core": "^4.17.36", "@types/jsonwebtoken": "^8.5.9", @@ -84,6 +85,7 @@ "@types/pg": "^8.6.6", "@types/qs": "^6.9.7", "@types/sharp": "^0.31.0", - "axios": "^0.27.2" + "axios": "^0.27.2", + "ws": "^8.12.0" } } diff --git a/packages/pds/src/api/app/bsky/actor/getProfile.ts b/packages/pds/src/api/app/bsky/actor/getProfile.ts index 52858515827..c200e1dd75f 100644 --- a/packages/pds/src/api/app/bsky/actor/getProfile.ts +++ b/packages/pds/src/api/app/bsky/actor/getProfile.ts @@ -11,7 +11,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ req, auth, params }) => { const requester = auth.credentials.type === 'access' ? auth.credentials.did : null - const res = await ctx.appviewAgent.api.app.bsky.actor.getProfile( + const res = await ctx.appViewAgent.api.app.bsky.actor.getProfile( params, requester ? await ctx.serviceAuthHeaders(requester) : authPassthru(req), ) diff --git a/packages/pds/src/api/app/bsky/actor/getProfiles.ts b/packages/pds/src/api/app/bsky/actor/getProfiles.ts index 46af4b08a0c..ebec9e36938 100644 --- a/packages/pds/src/api/app/bsky/actor/getProfiles.ts +++ b/packages/pds/src/api/app/bsky/actor/getProfiles.ts @@ -9,7 +9,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.actor.getProfiles( + const res = await ctx.appViewAgent.api.app.bsky.actor.getProfiles( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/actor/getSuggestions.ts b/packages/pds/src/api/app/bsky/actor/getSuggestions.ts index c3ceb16cf14..e6c72e5c830 100644 --- a/packages/pds/src/api/app/bsky/actor/getSuggestions.ts +++ b/packages/pds/src/api/app/bsky/actor/getSuggestions.ts @@ -1,12 +1,12 @@ -import AppContext from '../../../../context' import { Server } from 
'../../../../lexicon' +import AppContext from '../../../../context' export default function (server: Server, ctx: AppContext) { server.app.bsky.actor.getSuggestions({ auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.actor.getSuggestions( + const res = await ctx.appViewAgent.api.app.bsky.actor.getSuggestions( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/actor/index.ts b/packages/pds/src/api/app/bsky/actor/index.ts index db69616f7e8..041744899f6 100644 --- a/packages/pds/src/api/app/bsky/actor/index.ts +++ b/packages/pds/src/api/app/bsky/actor/index.ts @@ -1,5 +1,6 @@ import { Server } from '../../../../lexicon' import AppContext from '../../../../context' + import getPreferences from './getPreferences' import getProfile from './getProfile' import getProfiles from './getProfiles' diff --git a/packages/pds/src/api/app/bsky/actor/searchActors.ts b/packages/pds/src/api/app/bsky/actor/searchActors.ts index 921f4363bfc..3f1bd2355d6 100644 --- a/packages/pds/src/api/app/bsky/actor/searchActors.ts +++ b/packages/pds/src/api/app/bsky/actor/searchActors.ts @@ -1,12 +1,12 @@ -import AppContext from '../../../../context' import { Server } from '../../../../lexicon' +import AppContext from '../../../../context' export default function (server: Server, ctx: AppContext) { server.app.bsky.actor.searchActors({ auth: ctx.accessVerifier, - handler: async ({ auth, params }) => { + handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.actor.searchActors( + const res = await ctx.appViewAgent.api.app.bsky.actor.searchActors( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/actor/searchActorsTypeahead.ts b/packages/pds/src/api/app/bsky/actor/searchActorsTypeahead.ts index d8ef8f72dda..a637aea69c7 100644 --- 
a/packages/pds/src/api/app/bsky/actor/searchActorsTypeahead.ts +++ b/packages/pds/src/api/app/bsky/actor/searchActorsTypeahead.ts @@ -1,5 +1,5 @@ -import AppContext from '../../../../context' import { Server } from '../../../../lexicon' +import AppContext from '../../../../context' export default function (server: Server, ctx: AppContext) { server.app.bsky.actor.searchActorsTypeahead({ @@ -7,7 +7,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ params, auth }) => { const requester = auth.credentials.did const res = - await ctx.appviewAgent.api.app.bsky.actor.searchActorsTypeahead( + await ctx.appViewAgent.api.app.bsky.actor.searchActorsTypeahead( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getActorFeeds.ts b/packages/pds/src/api/app/bsky/feed/getActorFeeds.ts index ec77754b4b2..da99617178f 100644 --- a/packages/pds/src/api/app/bsky/feed/getActorFeeds.ts +++ b/packages/pds/src/api/app/bsky/feed/getActorFeeds.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getActorFeeds( + const res = await ctx.appViewAgent.api.app.bsky.feed.getActorFeeds( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getActorLikes.ts b/packages/pds/src/api/app/bsky/feed/getActorLikes.ts index 53557c6ae4c..9c0c38c5a20 100644 --- a/packages/pds/src/api/app/bsky/feed/getActorLikes.ts +++ b/packages/pds/src/api/app/bsky/feed/getActorLikes.ts @@ -12,7 +12,7 @@ export default function (server: Server, ctx: AppContext) { const requester = auth.credentials.type === 'access' ? auth.credentials.did : null - const res = await ctx.appviewAgent.api.app.bsky.feed.getActorLikes( + const res = await ctx.appViewAgent.api.app.bsky.feed.getActorLikes( params, requester ? 
await ctx.serviceAuthHeaders(requester) : authPassthru(req), ) diff --git a/packages/pds/src/api/app/bsky/feed/getAuthorFeed.ts b/packages/pds/src/api/app/bsky/feed/getAuthorFeed.ts index 7237a2df755..6563812fb9a 100644 --- a/packages/pds/src/api/app/bsky/feed/getAuthorFeed.ts +++ b/packages/pds/src/api/app/bsky/feed/getAuthorFeed.ts @@ -12,7 +12,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ req, params, auth }) => { const requester = auth.credentials.type === 'access' ? auth.credentials.did : null - const res = await ctx.appviewAgent.api.app.bsky.feed.getAuthorFeed( + const res = await ctx.appViewAgent.api.app.bsky.feed.getAuthorFeed( params, requester ? await ctx.serviceAuthHeaders(requester) : authPassthru(req), ) diff --git a/packages/pds/src/api/app/bsky/feed/getFeed.ts b/packages/pds/src/api/app/bsky/feed/getFeed.ts index 6ed14b0546c..051b0c7bcdf 100644 --- a/packages/pds/src/api/app/bsky/feed/getFeed.ts +++ b/packages/pds/src/api/app/bsky/feed/getFeed.ts @@ -8,11 +8,11 @@ export default function (server: Server, ctx: AppContext) { const requester = auth.credentials.did const { data: feed } = - await ctx.appviewAgent.api.app.bsky.feed.getFeedGenerator( + await ctx.appViewAgent.api.app.bsky.feed.getFeedGenerator( { feed: params.feed }, await ctx.serviceAuthHeaders(requester), ) - const res = await ctx.appviewAgent.api.app.bsky.feed.getFeed( + const res = await ctx.appViewAgent.api.app.bsky.feed.getFeed( params, await ctx.serviceAuthHeaders(requester, feed.view.did), ) diff --git a/packages/pds/src/api/app/bsky/feed/getFeedGenerator.ts b/packages/pds/src/api/app/bsky/feed/getFeedGenerator.ts index b9451ca16c3..28c404b58e8 100644 --- a/packages/pds/src/api/app/bsky/feed/getFeedGenerator.ts +++ b/packages/pds/src/api/app/bsky/feed/getFeedGenerator.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did 
- const res = await ctx.appviewAgent.api.app.bsky.feed.getFeedGenerator( + const res = await ctx.appViewAgent.api.app.bsky.feed.getFeedGenerator( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getFeedGenerators.ts b/packages/pds/src/api/app/bsky/feed/getFeedGenerators.ts index 1d085830004..12cf9e91c0a 100644 --- a/packages/pds/src/api/app/bsky/feed/getFeedGenerators.ts +++ b/packages/pds/src/api/app/bsky/feed/getFeedGenerators.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getFeedGenerators( + const res = await ctx.appViewAgent.api.app.bsky.feed.getFeedGenerators( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getLikes.ts b/packages/pds/src/api/app/bsky/feed/getLikes.ts index 75197acbcc8..771cc511cd4 100644 --- a/packages/pds/src/api/app/bsky/feed/getLikes.ts +++ b/packages/pds/src/api/app/bsky/feed/getLikes.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getLikes( + const res = await ctx.appViewAgent.api.app.bsky.feed.getLikes( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getListFeed.ts b/packages/pds/src/api/app/bsky/feed/getListFeed.ts index 7344b2476ba..34b8630a933 100644 --- a/packages/pds/src/api/app/bsky/feed/getListFeed.ts +++ b/packages/pds/src/api/app/bsky/feed/getListFeed.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getListFeed( + const 
res = await ctx.appViewAgent.api.app.bsky.feed.getListFeed( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getPostThread.ts b/packages/pds/src/api/app/bsky/feed/getPostThread.ts index e8dd062da14..270c1044497 100644 --- a/packages/pds/src/api/app/bsky/feed/getPostThread.ts +++ b/packages/pds/src/api/app/bsky/feed/getPostThread.ts @@ -22,17 +22,33 @@ import { getRepoRev, handleReadAfterWrite, } from '../util/read-after-write' +import { authPassthru } from '../../../com/atproto/admin/util' export default function (server: Server, ctx: AppContext) { server.app.bsky.feed.getPostThread({ - auth: ctx.accessVerifier, - handler: async ({ params, auth }) => { - const requester = auth.credentials.did + auth: ctx.accessOrRoleVerifier, + handler: async ({ req, params, auth }) => { + const requester = + auth.credentials.type === 'access' ? auth.credentials.did : null + + if (!requester) { + const res = await ctx.appViewAgent.api.app.bsky.feed.getPostThread( + params, + authPassthru(req), + ) + + return { + encoding: 'application/json', + body: res.data, + } + } + try { - const res = await ctx.appviewAgent.api.app.bsky.feed.getPostThread( + const res = await ctx.appViewAgent.api.app.bsky.feed.getPostThread( params, await ctx.serviceAuthHeaders(requester), ) + return await handleReadAfterWrite( ctx, requester, @@ -185,7 +201,7 @@ const readAfterWriteNotFound = async ( const highestParent = getHighestParent(thread) if (highestParent) { try { - const parentsRes = await ctx.appviewAgent.api.app.bsky.feed.getPostThread( + const parentsRes = await ctx.appViewAgent.api.app.bsky.feed.getPostThread( { uri: highestParent, parentHeight: params.parentHeight, depth: 0 }, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getPosts.ts b/packages/pds/src/api/app/bsky/feed/getPosts.ts index 05173c48ef9..1b755450f63 100644 --- a/packages/pds/src/api/app/bsky/feed/getPosts.ts +++ 
b/packages/pds/src/api/app/bsky/feed/getPosts.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getPosts( + const res = await ctx.appViewAgent.api.app.bsky.feed.getPosts( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getRepostedBy.ts b/packages/pds/src/api/app/bsky/feed/getRepostedBy.ts index 44a5b15191d..30e72b434e6 100644 --- a/packages/pds/src/api/app/bsky/feed/getRepostedBy.ts +++ b/packages/pds/src/api/app/bsky/feed/getRepostedBy.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getRepostedBy( + const res = await ctx.appViewAgent.api.app.bsky.feed.getRepostedBy( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getSuggestedFeeds.ts b/packages/pds/src/api/app/bsky/feed/getSuggestedFeeds.ts index 9c8d338104b..733405b3b42 100644 --- a/packages/pds/src/api/app/bsky/feed/getSuggestedFeeds.ts +++ b/packages/pds/src/api/app/bsky/feed/getSuggestedFeeds.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getSuggestedFeeds( + const res = await ctx.appViewAgent.api.app.bsky.feed.getSuggestedFeeds( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/feed/getTimeline.ts b/packages/pds/src/api/app/bsky/feed/getTimeline.ts index 7d4e52ce918..2c3e2ed44d6 100644 --- a/packages/pds/src/api/app/bsky/feed/getTimeline.ts +++ b/packages/pds/src/api/app/bsky/feed/getTimeline.ts @@ -9,7 +9,7 
@@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.feed.getTimeline( + const res = await ctx.appViewAgent.api.app.bsky.feed.getTimeline( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getBlocks.ts b/packages/pds/src/api/app/bsky/graph/getBlocks.ts index 284dafd3034..f66eb64b945 100644 --- a/packages/pds/src/api/app/bsky/graph/getBlocks.ts +++ b/packages/pds/src/api/app/bsky/graph/getBlocks.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getBlocks( + const res = await ctx.appViewAgent.api.app.bsky.graph.getBlocks( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getFollowers.ts b/packages/pds/src/api/app/bsky/graph/getFollowers.ts index da0541d7e75..389f92d4e14 100644 --- a/packages/pds/src/api/app/bsky/graph/getFollowers.ts +++ b/packages/pds/src/api/app/bsky/graph/getFollowers.ts @@ -8,7 +8,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ req, params, auth }) => { const requester = auth.credentials.type === 'access' ? auth.credentials.did : null - const res = await ctx.appviewAgent.api.app.bsky.graph.getFollowers( + const res = await ctx.appViewAgent.api.app.bsky.graph.getFollowers( params, requester ? 
await ctx.serviceAuthHeaders(requester) : authPassthru(req), ) diff --git a/packages/pds/src/api/app/bsky/graph/getFollows.ts b/packages/pds/src/api/app/bsky/graph/getFollows.ts index f49c812f9ca..343fd81d414 100644 --- a/packages/pds/src/api/app/bsky/graph/getFollows.ts +++ b/packages/pds/src/api/app/bsky/graph/getFollows.ts @@ -8,7 +8,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ req, params, auth }) => { const requester = auth.credentials.type === 'access' ? auth.credentials.did : null - const res = await ctx.appviewAgent.api.app.bsky.graph.getFollows( + const res = await ctx.appViewAgent.api.app.bsky.graph.getFollows( params, requester ? await ctx.serviceAuthHeaders(requester) : authPassthru(req), ) diff --git a/packages/pds/src/api/app/bsky/graph/getList.ts b/packages/pds/src/api/app/bsky/graph/getList.ts index 5fd3c93df75..061d6759c2c 100644 --- a/packages/pds/src/api/app/bsky/graph/getList.ts +++ b/packages/pds/src/api/app/bsky/graph/getList.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getList( + const res = await ctx.appViewAgent.api.app.bsky.graph.getList( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getListBlocks.ts b/packages/pds/src/api/app/bsky/graph/getListBlocks.ts index 04fd55a324e..83975782fa4 100644 --- a/packages/pds/src/api/app/bsky/graph/getListBlocks.ts +++ b/packages/pds/src/api/app/bsky/graph/getListBlocks.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getListBlocks( + const res = await ctx.appViewAgent.api.app.bsky.graph.getListBlocks( params, await 
ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getListMutes.ts b/packages/pds/src/api/app/bsky/graph/getListMutes.ts index e0a624a3864..05f6ce1ab09 100644 --- a/packages/pds/src/api/app/bsky/graph/getListMutes.ts +++ b/packages/pds/src/api/app/bsky/graph/getListMutes.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getListMutes( + const res = await ctx.appViewAgent.api.app.bsky.graph.getListMutes( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getLists.ts b/packages/pds/src/api/app/bsky/graph/getLists.ts index e43a8d2b1d6..6c8f6452ea4 100644 --- a/packages/pds/src/api/app/bsky/graph/getLists.ts +++ b/packages/pds/src/api/app/bsky/graph/getLists.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ params, auth }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getLists( + const res = await ctx.appViewAgent.api.app.bsky.graph.getLists( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getMutes.ts b/packages/pds/src/api/app/bsky/graph/getMutes.ts index 9aa6b74445c..12ff1a032a0 100644 --- a/packages/pds/src/api/app/bsky/graph/getMutes.ts +++ b/packages/pds/src/api/app/bsky/graph/getMutes.ts @@ -6,7 +6,7 @@ export default function (server: Server, ctx: AppContext) { auth: ctx.accessVerifier, handler: async ({ auth, params }) => { const requester = auth.credentials.did - const res = await ctx.appviewAgent.api.app.bsky.graph.getMutes( + const res = await ctx.appViewAgent.api.app.bsky.graph.getMutes( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/getSuggestedFollowsByActor.ts 
b/packages/pds/src/api/app/bsky/graph/getSuggestedFollowsByActor.ts index 1db1c7f498f..53125cbc517 100644 --- a/packages/pds/src/api/app/bsky/graph/getSuggestedFollowsByActor.ts +++ b/packages/pds/src/api/app/bsky/graph/getSuggestedFollowsByActor.ts @@ -7,7 +7,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ auth, params }) => { const requester = auth.credentials.did const res = - await ctx.appviewAgent.api.app.bsky.graph.getSuggestedFollowsByActor( + await ctx.appViewAgent.api.app.bsky.graph.getSuggestedFollowsByActor( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/graph/muteActor.ts b/packages/pds/src/api/app/bsky/graph/muteActor.ts index 44d4747f5e9..9f753bac926 100644 --- a/packages/pds/src/api/app/bsky/graph/muteActor.ts +++ b/packages/pds/src/api/app/bsky/graph/muteActor.ts @@ -1,4 +1,3 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -6,28 +5,11 @@ export default function (server: Server, ctx: AppContext) { server.app.bsky.graph.muteActor({ auth: ctx.accessVerifier, handler: async ({ auth, input }) => { - const { actor } = input.body const requester = auth.credentials.did - const { db, services } = ctx - const subject = await services.account(db).getAccount(actor) - if (!subject) { - throw new InvalidRequestError(`Actor not found: ${actor}`) - } - if (subject.did === requester) { - throw new InvalidRequestError('Cannot mute oneself') - } - - if (ctx.canProxyWrite()) { - await ctx.appviewAgent.api.app.bsky.graph.muteActor(input.body, { - ...(await ctx.serviceAuthHeaders(requester)), - encoding: 'application/json', - }) - } - - await services.account(db).mute({ - did: subject.did, - mutedByDid: requester, + await ctx.appViewAgent.api.app.bsky.graph.muteActor(input.body, { + ...(await ctx.serviceAuthHeaders(requester)), + encoding: 'application/json', }) }, }) diff --git 
a/packages/pds/src/api/app/bsky/graph/muteActorList.ts b/packages/pds/src/api/app/bsky/graph/muteActorList.ts index e554f7fce8b..441571a26b9 100644 --- a/packages/pds/src/api/app/bsky/graph/muteActorList.ts +++ b/packages/pds/src/api/app/bsky/graph/muteActorList.ts @@ -1,32 +1,15 @@ import { Server } from '../../../../lexicon' -import * as lex from '../../../../lexicon/lexicons' import AppContext from '../../../../context' -import { AtUri } from '@atproto/syntax' -import { InvalidRequestError } from '@atproto/xrpc-server' export default function (server: Server, ctx: AppContext) { server.app.bsky.graph.muteActorList({ auth: ctx.accessVerifier, handler: async ({ auth, input }) => { - const { list } = input.body const requester = auth.credentials.did - const listUri = new AtUri(list) - const collId = lex.ids.AppBskyGraphList - if (listUri.collection !== collId) { - throw new InvalidRequestError(`Invalid collection: expected: ${collId}`) - } - - if (ctx.canProxyWrite()) { - await ctx.appviewAgent.api.app.bsky.graph.muteActorList(input.body, { - ...(await ctx.serviceAuthHeaders(requester)), - encoding: 'application/json', - }) - } - - await ctx.services.account(ctx.db).muteActorList({ - list, - mutedByDid: requester, + await ctx.appViewAgent.api.app.bsky.graph.muteActorList(input.body, { + ...(await ctx.serviceAuthHeaders(requester)), + encoding: 'application/json', }) }, }) diff --git a/packages/pds/src/api/app/bsky/graph/unmuteActor.ts b/packages/pds/src/api/app/bsky/graph/unmuteActor.ts index 84819cc1e15..586b12565d6 100644 --- a/packages/pds/src/api/app/bsky/graph/unmuteActor.ts +++ b/packages/pds/src/api/app/bsky/graph/unmuteActor.ts @@ -1,30 +1,15 @@ import { Server } from '../../../../lexicon' -import { InvalidRequestError } from '@atproto/xrpc-server' import AppContext from '../../../../context' export default function (server: Server, ctx: AppContext) { server.app.bsky.graph.unmuteActor({ auth: ctx.accessVerifier, handler: async ({ auth, input }) => { - const 
{ actor } = input.body const requester = auth.credentials.did - const { db, services } = ctx - const subject = await services.account(db).getAccount(actor) - if (!subject) { - throw new InvalidRequestError(`Actor not found: ${actor}`) - } - - if (ctx.canProxyWrite()) { - await ctx.appviewAgent.api.app.bsky.graph.unmuteActor(input.body, { - ...(await ctx.serviceAuthHeaders(requester)), - encoding: 'application/json', - }) - } - - await services.account(db).unmute({ - did: subject.did, - mutedByDid: requester, + await ctx.appViewAgent.api.app.bsky.graph.unmuteActor(input.body, { + ...(await ctx.serviceAuthHeaders(requester)), + encoding: 'application/json', }) }, }) diff --git a/packages/pds/src/api/app/bsky/graph/unmuteActorList.ts b/packages/pds/src/api/app/bsky/graph/unmuteActorList.ts index ce3c1a4b254..e8ba9f8c4d4 100644 --- a/packages/pds/src/api/app/bsky/graph/unmuteActorList.ts +++ b/packages/pds/src/api/app/bsky/graph/unmuteActorList.ts @@ -5,19 +5,11 @@ export default function (server: Server, ctx: AppContext) { server.app.bsky.graph.unmuteActorList({ auth: ctx.accessVerifier, handler: async ({ auth, input }) => { - const { list } = input.body const requester = auth.credentials.did - if (ctx.canProxyWrite()) { - await ctx.appviewAgent.api.app.bsky.graph.unmuteActorList(input.body, { - ...(await ctx.serviceAuthHeaders(requester)), - encoding: 'application/json', - }) - } - - await ctx.services.account(ctx.db).unmuteActorList({ - list, - mutedByDid: requester, + await ctx.appViewAgent.api.app.bsky.graph.unmuteActorList(input.body, { + ...(await ctx.serviceAuthHeaders(requester)), + encoding: 'application/json', }) }, }) diff --git a/packages/pds/src/api/app/bsky/notification/getUnreadCount.ts b/packages/pds/src/api/app/bsky/notification/getUnreadCount.ts index e8100b183c5..c8b723403d5 100644 --- a/packages/pds/src/api/app/bsky/notification/getUnreadCount.ts +++ b/packages/pds/src/api/app/bsky/notification/getUnreadCount.ts @@ -7,7 +7,7 @@ export default 
function (server: Server, ctx: AppContext) { handler: async ({ auth, params }) => { const requester = auth.credentials.did const res = - await ctx.appviewAgent.api.app.bsky.notification.getUnreadCount( + await ctx.appViewAgent.api.app.bsky.notification.getUnreadCount( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/notification/listNotifications.ts b/packages/pds/src/api/app/bsky/notification/listNotifications.ts index 2f667172a57..48e75304af5 100644 --- a/packages/pds/src/api/app/bsky/notification/listNotifications.ts +++ b/packages/pds/src/api/app/bsky/notification/listNotifications.ts @@ -7,7 +7,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ params, auth }) => { const requester = auth.credentials.did const res = - await ctx.appviewAgent.api.app.bsky.notification.listNotifications( + await ctx.appViewAgent.api.app.bsky.notification.listNotifications( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/notification/registerPush.ts b/packages/pds/src/api/app/bsky/notification/registerPush.ts index 1abe5b83ed7..d5db39f1ac7 100644 --- a/packages/pds/src/api/app/bsky/notification/registerPush.ts +++ b/packages/pds/src/api/app/bsky/notification/registerPush.ts @@ -16,12 +16,14 @@ export default function (server: Server, ctx: AppContext) { const authHeaders = await ctx.serviceAuthHeaders(did, serviceDid) - if (ctx.canProxyWrite() && ctx.cfg.bskyAppViewDid === serviceDid) { - const { appviewAgent } = ctx - await appviewAgent.api.app.bsky.notification.registerPush(input.body, { - ...authHeaders, - encoding: 'application/json', - }) + if (ctx.cfg.bskyAppView.did === serviceDid) { + await ctx.appViewAgent.api.app.bsky.notification.registerPush( + input.body, + { + ...authHeaders, + encoding: 'application/json', + }, + ) return } diff --git a/packages/pds/src/api/app/bsky/notification/updateSeen.ts 
b/packages/pds/src/api/app/bsky/notification/updateSeen.ts index 2c115a61f67..44fe4bc13cc 100644 --- a/packages/pds/src/api/app/bsky/notification/updateSeen.ts +++ b/packages/pds/src/api/app/bsky/notification/updateSeen.ts @@ -1,4 +1,3 @@ -import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' @@ -6,36 +5,12 @@ export default function (server: Server, ctx: AppContext) { server.app.bsky.notification.updateSeen({ auth: ctx.accessVerifier, handler: async ({ input, auth }) => { - const { seenAt } = input.body const requester = auth.credentials.did - let parsed: string - try { - parsed = new Date(seenAt).toISOString() - } catch (_err) { - throw new InvalidRequestError('Invalid date') - } - - const user = await ctx.services.account(ctx.db).getAccount(requester) - if (!user) { - throw new InvalidRequestError(`Could not find user: ${requester}`) - } - - if (ctx.canProxyWrite()) { - await ctx.appviewAgent.api.app.bsky.notification.updateSeen( - input.body, - { - ...(await ctx.serviceAuthHeaders(requester)), - encoding: 'application/json', - }, - ) - } - - await ctx.db.db - .updateTable('user_state') - .set({ lastSeenNotifs: parsed }) - .where('did', '=', user.did) - .executeTakeFirst() + await ctx.appViewAgent.api.app.bsky.notification.updateSeen(input.body, { + ...(await ctx.serviceAuthHeaders(requester)), + encoding: 'application/json', + }) }, }) } diff --git a/packages/pds/src/api/app/bsky/unspecced/getPopular.ts b/packages/pds/src/api/app/bsky/unspecced/getPopular.ts index c4b4736cdc0..f890ea7baed 100644 --- a/packages/pds/src/api/app/bsky/unspecced/getPopular.ts +++ b/packages/pds/src/api/app/bsky/unspecced/getPopular.ts @@ -10,7 +10,7 @@ export default function (server: Server, ctx: AppContext) { const HOT_CLASSIC_URI = 'at://did:plc:z72i7hdynmk6r22z27h6tvur/app.bsky.feed.generator/hot-classic' const HOT_CLASSIC_DID = 'did:plc:5fllqkujj6kqp5izd5jg7gox' - const res = await 
ctx.appviewAgent.api.app.bsky.feed.getFeed( + const res = await ctx.appViewAgent.api.app.bsky.feed.getFeed( { feed: HOT_CLASSIC_URI, limit: params.limit, cursor: params.cursor }, await ctx.serviceAuthHeaders(requester, HOT_CLASSIC_DID), ) diff --git a/packages/pds/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts b/packages/pds/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts index 3f41f18560c..abc556cdb70 100644 --- a/packages/pds/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts +++ b/packages/pds/src/api/app/bsky/unspecced/getPopularFeedGenerators.ts @@ -8,7 +8,7 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ auth, params }) => { const requester = auth.credentials.did const res = - await ctx.appviewAgent.api.app.bsky.unspecced.getPopularFeedGenerators( + await ctx.appViewAgent.api.app.bsky.unspecced.getPopularFeedGenerators( params, await ctx.serviceAuthHeaders(requester), ) diff --git a/packages/pds/src/api/app/bsky/util/index.ts b/packages/pds/src/api/app/bsky/util/index.ts deleted file mode 100644 index fc3864fbd8f..00000000000 --- a/packages/pds/src/api/app/bsky/util/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const isEnum = ( - object: T, - possibleValue: unknown, -): possibleValue is T[keyof T] => { - return Object.values(object).includes(possibleValue) -} diff --git a/packages/pds/src/api/com/atproto/admin/getModerationAction.ts b/packages/pds/src/api/com/atproto/admin/getModerationAction.ts index 10d38174149..258ca9d94a1 100644 --- a/packages/pds/src/api/com/atproto/admin/getModerationAction.ts +++ b/packages/pds/src/api/com/atproto/admin/getModerationAction.ts @@ -12,9 +12,9 @@ export default function (server: Server, ctx: AppContext) { const accountService = services.account(db) const moderationService = services.moderation(db) - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: resultAppview } = - await 
ctx.appviewAgent.com.atproto.admin.getModerationAction( + await ctx.appViewAgent.com.atproto.admin.getModerationAction( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/getModerationActions.ts b/packages/pds/src/api/com/atproto/admin/getModerationActions.ts index 5467b590ea5..0ef48e99851 100644 --- a/packages/pds/src/api/com/atproto/admin/getModerationActions.ts +++ b/packages/pds/src/api/com/atproto/admin/getModerationActions.ts @@ -6,9 +6,9 @@ export default function (server: Server, ctx: AppContext) { server.com.atproto.admin.getModerationActions({ auth: ctx.roleVerifier, handler: async ({ req, params }) => { - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: result } = - await ctx.appviewAgent.com.atproto.admin.getModerationActions( + await ctx.appViewAgent.com.atproto.admin.getModerationActions( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/getModerationReport.ts b/packages/pds/src/api/com/atproto/admin/getModerationReport.ts index 7c0592177fa..b75268ebdf8 100644 --- a/packages/pds/src/api/com/atproto/admin/getModerationReport.ts +++ b/packages/pds/src/api/com/atproto/admin/getModerationReport.ts @@ -12,9 +12,9 @@ export default function (server: Server, ctx: AppContext) { const accountService = services.account(db) const moderationService = services.moderation(db) - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: resultAppview } = - await ctx.appviewAgent.com.atproto.admin.getModerationReport( + await ctx.appViewAgent.com.atproto.admin.getModerationReport( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/getModerationReports.ts b/packages/pds/src/api/com/atproto/admin/getModerationReports.ts index 8ef2b69c319..2d5dd329bc4 100644 --- a/packages/pds/src/api/com/atproto/admin/getModerationReports.ts +++ b/packages/pds/src/api/com/atproto/admin/getModerationReports.ts @@ -6,9 
+6,9 @@ export default function (server: Server, ctx: AppContext) { server.com.atproto.admin.getModerationReports({ auth: ctx.roleVerifier, handler: async ({ req, params }) => { - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: result } = - await ctx.appviewAgent.com.atproto.admin.getModerationReports( + await ctx.appViewAgent.com.atproto.admin.getModerationReports( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/getRecord.ts b/packages/pds/src/api/com/atproto/admin/getRecord.ts index 4da9cf9246d..b68d01aefda 100644 --- a/packages/pds/src/api/com/atproto/admin/getRecord.ts +++ b/packages/pds/src/api/com/atproto/admin/getRecord.ts @@ -20,10 +20,10 @@ export default function (server: Server, ctx: AppContext) { includeEmails: access.moderator, })) - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { try { const { data: recordDetailAppview } = - await ctx.appviewAgent.com.atproto.admin.getRecord( + await ctx.appViewAgent.com.atproto.admin.getRecord( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/getRepo.ts b/packages/pds/src/api/com/atproto/admin/getRepo.ts index b3a0f107bc8..19e07862851 100644 --- a/packages/pds/src/api/com/atproto/admin/getRepo.ts +++ b/packages/pds/src/api/com/atproto/admin/getRepo.ts @@ -17,10 +17,10 @@ export default function (server: Server, ctx: AppContext) { includeEmails: access.moderator, })) - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { try { let { data: repoDetailAppview } = - await ctx.appviewAgent.com.atproto.admin.getRepo( + await ctx.appViewAgent.com.atproto.admin.getRepo( params, authPassthru(req), ) diff --git a/packages/pds/src/api/com/atproto/admin/searchRepos.ts b/packages/pds/src/api/com/atproto/admin/searchRepos.ts index 42ec5aa4058..da2d7fa3788 100644 --- a/packages/pds/src/api/com/atproto/admin/searchRepos.ts +++ 
b/packages/pds/src/api/com/atproto/admin/searchRepos.ts @@ -1,7 +1,6 @@ +import { sql } from 'kysely' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { SearchKeyset } from '../../../../services/util/search' -import { sql } from 'kysely' import { ListKeyset } from '../../../../services/account' import { authPassthru } from './util' @@ -9,11 +8,11 @@ export default function (server: Server, ctx: AppContext) { server.com.atproto.admin.searchRepos({ auth: ctx.roleVerifier, handler: async ({ req, params, auth }) => { - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { // @TODO merge invite details to this list view. could also add // support for invitedBy param, which is not supported by appview. const { data: result } = - await ctx.appviewAgent.com.atproto.admin.searchRepos( + await ctx.appViewAgent.com.atproto.admin.searchRepos( params, authPassthru(req), ) @@ -26,14 +25,15 @@ export default function (server: Server, ctx: AppContext) { const access = auth.credentials const { db, services } = ctx const moderationService = services.moderation(db) - const { term = '', limit = 50, cursor, invitedBy } = params + const { limit, cursor, invitedBy } = params + const query = params.q?.trim() ?? params.term?.trim() ?? '' + + const keyset = new ListKeyset(sql``, sql``) - if (!term) { + if (!query) { const results = await services .account(db) .list({ limit, cursor, includeSoftDeleted: true, invitedBy }) - const keyset = new ListKeyset(sql``, sql``) - return { encoding: 'application/json', body: { @@ -45,19 +45,17 @@ export default function (server: Server, ctx: AppContext) { } } - const searchField = term.startsWith('did:') ? 
'did' : 'handle' - const results = await services .account(db) - .search({ searchField, term, limit, cursor, includeSoftDeleted: true }) - const keyset = new SearchKeyset(sql``, sql``) + .search({ query, limit, cursor, includeSoftDeleted: true }) return { encoding: 'application/json', body: { // For did search, we can only find 1 or no match, cursors can be ignored entirely - cursor: - searchField === 'did' ? undefined : keyset.packFromResult(results), + cursor: query.startsWith('did:') + ? undefined + : keyset.packFromResult(results), repos: await moderationService.views.repo(results, { includeEmails: access.moderator, }), diff --git a/packages/pds/src/api/com/atproto/admin/takeModerationAction.ts b/packages/pds/src/api/com/atproto/admin/takeModerationAction.ts index d4ca603efce..a8da631014a 100644 --- a/packages/pds/src/api/com/atproto/admin/takeModerationAction.ts +++ b/packages/pds/src/api/com/atproto/admin/takeModerationAction.ts @@ -19,9 +19,9 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ req, input, auth }) => { const access = auth.credentials const { db, services } = ctx - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: result } = - await ctx.appviewAgent.com.atproto.admin.takeModerationAction( + await ctx.appViewAgent.com.atproto.admin.takeModerationAction( input.body, authPassthru(req, true), ) diff --git a/packages/pds/src/api/com/atproto/identity/resolveHandle.ts b/packages/pds/src/api/com/atproto/identity/resolveHandle.ts index 4894103d1c1..f708f3f93d8 100644 --- a/packages/pds/src/api/com/atproto/identity/resolveHandle.ts +++ b/packages/pds/src/api/com/atproto/identity/resolveHandle.ts @@ -23,7 +23,7 @@ export default function (server: Server, ctx: AppContext) { if (user) { did = user.did } else { - const supportedHandle = ctx.cfg.availableUserDomains.some( + const supportedHandle = ctx.cfg.identity.serviceHandleDomains.some( (host) => handle.endsWith(host) || handle === 
host.slice(1), ) // this should be in our DB & we couldn't find it, so fail @@ -34,7 +34,7 @@ export default function (server: Server, ctx: AppContext) { // this is not someone on our server, but we help with resolving anyway if (!did) { - did = await tryResolveFromAppview(ctx.appviewAgent, handle) + did = await tryResolveFromAppView(ctx.appViewAgent, handle) } if (!did) { @@ -52,7 +52,7 @@ export default function (server: Server, ctx: AppContext) { }) } -async function tryResolveFromAppview(agent: AtpAgent, handle: string) { +async function tryResolveFromAppView(agent: AtpAgent, handle: string) { try { const result = await agent.api.com.atproto.identity.resolveHandle({ handle, diff --git a/packages/pds/src/api/com/atproto/moderation/createReport.ts b/packages/pds/src/api/com/atproto/moderation/createReport.ts index 68aac3e86b3..83cd5f454e0 100644 --- a/packages/pds/src/api/com/atproto/moderation/createReport.ts +++ b/packages/pds/src/api/com/atproto/moderation/createReport.ts @@ -8,9 +8,9 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ input, auth }) => { const requester = auth.credentials.did - if (ctx.shouldProxyModeration()) { + if (ctx.cfg.bskyAppView.proxyModeration) { const { data: result } = - await ctx.appviewAgent.com.atproto.moderation.createReport( + await ctx.appViewAgent.com.atproto.moderation.createReport( input.body, { ...(await ctx.serviceAuthHeaders(requester)), diff --git a/packages/pds/src/api/com/atproto/repo/getRecord.ts b/packages/pds/src/api/com/atproto/repo/getRecord.ts index 29194c94843..5c99a7226c1 100644 --- a/packages/pds/src/api/com/atproto/repo/getRecord.ts +++ b/packages/pds/src/api/com/atproto/repo/getRecord.ts @@ -14,20 +14,20 @@ export default function (server: Server, ctx: AppContext) { const record = await ctx.services .record(ctx.db) .getRecord(uri, cid || null) - if (!record) { + if (!record || record.takedownId !== null) { throw new InvalidRequestError(`Could not locate record: ${uri}`) } 
return { encoding: 'application/json', body: { - uri: record.uri, + uri: uri.toString(), cid: record.cid, value: record.value, }, } } - const res = await ctx.appviewAgent.api.com.atproto.repo.getRecord(params) + const res = await ctx.appViewAgent.api.com.atproto.repo.getRecord(params) return { encoding: 'application/json', body: res.data, diff --git a/packages/pds/src/api/com/atproto/server/createAccount.ts b/packages/pds/src/api/com/atproto/server/createAccount.ts index 5827ff6c658..334c2f2b132 100644 --- a/packages/pds/src/api/com/atproto/server/createAccount.ts +++ b/packages/pds/src/api/com/atproto/server/createAccount.ts @@ -1,4 +1,5 @@ import { InvalidRequestError } from '@atproto/xrpc-server' +import disposable from 'disposable-email' import { normalizeAndValidateHandle } from '../../../../handle' import * as plc from '@did-plc/lib' import * as scrypt from '../../../../db/scrypt' @@ -20,13 +21,19 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ input, req }) => { const { email, password, inviteCode } = input.body - if (ctx.cfg.inviteRequired && !inviteCode) { + if (ctx.cfg.invites.required && !inviteCode) { throw new InvalidRequestError( 'No invite code provided', 'InvalidInviteCode', ) } + if (!disposable.validate(email)) { + throw new InvalidRequestError( + 'This email address is not supported, please use a different email.', + ) + } + // normalize & ensure valid handle const handle = await normalizeAndValidateHandle({ ctx, @@ -35,7 +42,7 @@ export default function (server: Server, ctx: AppContext) { }) // check that the invite code still has uses - if (ctx.cfg.inviteRequired && inviteCode) { + if (ctx.cfg.invites.required && inviteCode) { await ensureCodeIsAvailable(ctx.db, inviteCode) } @@ -53,7 +60,7 @@ export default function (server: Server, ctx: AppContext) { // it's a bit goofy that we run this logic twice, // but we run it once for a sanity check before doing scrypt & plc ops // & a second time for locking + 
integrity check - if (ctx.cfg.inviteRequired && inviteCode) { + if (ctx.cfg.invites.required && inviteCode) { await ensureCodeIsAvailable(dbTxn, inviteCode, true) } @@ -86,7 +93,7 @@ export default function (server: Server, ctx: AppContext) { } // insert invite code use - if (ctx.cfg.inviteRequired && inviteCode) { + if (ctx.cfg.invites.required && inviteCode) { await dbTxn.db .insertInto('invite_code_use') .values({ @@ -176,7 +183,10 @@ const getDidAndPlcOp = async ( // if the user is not bringing a DID, then we format a create op for PLC // but we don't send until we ensure the username & email are available if (!input.did) { - const rotationKeys = [ctx.cfg.recoveryKey, ctx.plcRotationKey.did()] + const rotationKeys = [ctx.plcRotationKey.did()] + if (ctx.cfg.identity.recoveryDidKey) { + rotationKeys.unshift(ctx.cfg.identity.recoveryDidKey) + } if (input.recoveryKey) { rotationKeys.unshift(input.recoveryKey) } @@ -184,7 +194,7 @@ const getDidAndPlcOp = async ( signingKey: ctx.repoSigningKey.did(), rotationKeys, handle, - pds: ctx.cfg.publicUrl, + pds: ctx.cfg.service.publicUrl, signer: ctx.plcRotationKey, }) return { @@ -210,7 +220,7 @@ const getDidAndPlcOp = async ( 'provided handle does not match DID document handle', 'IncompatibleDidDoc', ) - } else if (atpData.pds !== ctx.cfg.publicUrl) { + } else if (atpData.pds !== ctx.cfg.service.publicUrl) { throw new InvalidRequestError( 'DID document pds endpoint does not match service endpoint', 'IncompatibleDidDoc', diff --git a/packages/pds/src/api/com/atproto/server/deleteAccount.ts b/packages/pds/src/api/com/atproto/server/deleteAccount.ts index 9ebfcfa7fdf..4d12edb1b32 100644 --- a/packages/pds/src/api/com/atproto/server/deleteAccount.ts +++ b/packages/pds/src/api/com/atproto/server/deleteAccount.ts @@ -2,7 +2,6 @@ import { AuthRequiredError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import { TAKEDOWN } from '../../../../lexicon/types/com/atproto/admin/defs' import AppContext from 
'../../../../context' -import Database from '../../../../db' import { MINUTE } from '@atproto/common' const REASON_ACCT_DELETION = 'ACCOUNT DELETION' @@ -22,40 +21,18 @@ export default function (server: Server, ctx: AppContext) { throw new AuthRequiredError('Invalid did or password') } - const tokenInfo = await ctx.db.db - .selectFrom('did_handle') - .innerJoin( - 'delete_account_token as token', - 'token.did', - 'did_handle.did', - ) - .where('did_handle.did', '=', did) - .where('token.token', '=', token.toUpperCase()) - .select([ - 'token.token as token', - 'token.requestedAt as requestedAt', - 'token.did as did', - ]) - .executeTakeFirst() - - if (!tokenInfo) { - return createInvalidTokenError() - } + await ctx.services + .account(ctx.db) + .assertValidToken(did, 'delete_account', token) const now = new Date() - const requestedAt = new Date(tokenInfo.requestedAt) - const expiresAt = new Date(requestedAt.getTime() + 15 * minsToMs) - if (now > expiresAt) { - await removeDeleteToken(ctx.db, tokenInfo.did) - return createExpiredTokenError() - } - await ctx.db.transaction(async (dbTxn) => { + const accountService = ctx.services.account(dbTxn) const moderationTxn = ctx.services.moderation(dbTxn) const [currentAction] = await moderationTxn.getCurrentActions({ did }) if (currentAction?.action === TAKEDOWN) { // Do not disturb an existing takedown, continue with account deletion - return await removeDeleteToken(dbTxn, did) + return await accountService.deleteEmailToken(did, 'delete_account') } if (currentAction) { // Reverse existing action to replace it with a self-takedown @@ -74,7 +51,7 @@ export default function (server: Server, ctx: AppContext) { createdAt: now, }) await moderationTxn.takedownRepo({ did, takedownId: takedown.id }) - await removeDeleteToken(dbTxn, did) + await accountService.deleteEmailToken(did, 'delete_account') }) ctx.backgroundQueue.add(async (db) => { @@ -90,34 +67,3 @@ export default function (server: Server, ctx: AppContext) { }, }) } - -type 
ErrorResponse = { - status: number - error: string - message: string -} - -const minsToMs = 60 * 1000 - -const createInvalidTokenError = (): ErrorResponse & { - error: 'InvalidToken' -} => ({ - status: 400, - error: 'InvalidToken', - message: 'Token is invalid', -}) - -const createExpiredTokenError = (): ErrorResponse & { - error: 'ExpiredToken' -} => ({ - status: 400, - error: 'ExpiredToken', - message: 'The password reset token has expired', -}) - -const removeDeleteToken = async (db: Database, did: string) => { - await db.db - .deleteFrom('delete_account_token') - .where('delete_account_token.did', '=', did) - .execute() -} diff --git a/packages/pds/src/api/com/atproto/server/describeServer.ts b/packages/pds/src/api/com/atproto/server/describeServer.ts index 0a7b1594cd4..0ad3b2d66eb 100644 --- a/packages/pds/src/api/com/atproto/server/describeServer.ts +++ b/packages/pds/src/api/com/atproto/server/describeServer.ts @@ -3,10 +3,10 @@ import AppContext from '../../../../context' export default function (server: Server, ctx: AppContext) { server.com.atproto.server.describeServer(() => { - const availableUserDomains = ctx.cfg.availableUserDomains - const inviteCodeRequired = ctx.cfg.inviteRequired - const privacyPolicy = ctx.cfg.privacyPolicyUrl - const termsOfService = ctx.cfg.termsOfServiceUrl + const availableUserDomains = ctx.cfg.identity.serviceHandleDomains + const inviteCodeRequired = ctx.cfg.invites.required + const privacyPolicy = ctx.cfg.service.privacyPolicyUrl + const termsOfService = ctx.cfg.service.termsOfServiceUrl return { encoding: 'application/json', diff --git a/packages/pds/src/api/com/atproto/server/getAccountInviteCodes.ts b/packages/pds/src/api/com/atproto/server/getAccountInviteCodes.ts index 671528fcdd4..275398f7609 100644 --- a/packages/pds/src/api/com/atproto/server/getAccountInviteCodes.ts +++ b/packages/pds/src/api/com/atproto/server/getAccountInviteCodes.ts @@ -25,13 +25,17 @@ export default function (server: Server, ctx: AppContext) { 
let created: string[] = [] const now = new Date().toISOString() - if (createAvailable && ctx.cfg.userInviteInterval !== null) { + if ( + createAvailable && + ctx.cfg.invites.required && + ctx.cfg.invites.interval !== null + ) { const { toCreate, total } = await calculateCodesToCreate({ did: requester, userCreatedAt: new Date(user.createdAt).getTime(), codes: userCodes, - epoch: ctx.cfg.userInviteEpoch, - interval: ctx.cfg.userInviteInterval, + epoch: ctx.cfg.invites.epoch, + interval: ctx.cfg.invites.interval, }) if (toCreate > 0) { created = genInvCodes(ctx.cfg, toCreate) diff --git a/packages/pds/src/api/com/atproto/server/requestAccountDelete.ts b/packages/pds/src/api/com/atproto/server/requestAccountDelete.ts index a448d97c02e..c438c32f69f 100644 --- a/packages/pds/src/api/com/atproto/server/requestAccountDelete.ts +++ b/packages/pds/src/api/com/atproto/server/requestAccountDelete.ts @@ -1,7 +1,6 @@ import { InvalidRequestError } from '@atproto/xrpc-server' import { Server } from '../../../../lexicon' import AppContext from '../../../../context' -import { getRandomToken } from './util' export default function (server: Server, ctx: AppContext) { server.com.atproto.server.requestAccountDelete({ @@ -12,15 +11,9 @@ export default function (server: Server, ctx: AppContext) { if (!user) { throw new InvalidRequestError('user not found') } - const token = getRandomToken().toUpperCase() - const requestedAt = new Date().toISOString() - await ctx.db.db - .insertInto('delete_account_token') - .values({ did, token, requestedAt }) - .onConflict((oc) => - oc.column('did').doUpdateSet({ token, requestedAt }), - ) - .execute() + const token = await ctx.services + .account(ctx.db) + .createEmailToken(did, 'delete_account') await ctx.mailer.sendAccountDelete({ token }, { to: user.email }) }, }) diff --git a/packages/pds/src/api/com/atproto/server/requestPasswordReset.ts b/packages/pds/src/api/com/atproto/server/requestPasswordReset.ts index 5d81e43c68b..61b17ebb9a9 100644 --- 
a/packages/pds/src/api/com/atproto/server/requestPasswordReset.ts +++ b/packages/pds/src/api/com/atproto/server/requestPasswordReset.ts @@ -1,6 +1,5 @@ import AppContext from '../../../../context' import { Server } from '../../../../lexicon' -import { getRandomToken } from './util' export default function (server: Server, ctx: AppContext) { server.com.atproto.server.requestPasswordReset(async ({ input }) => { @@ -9,16 +8,9 @@ export default function (server: Server, ctx: AppContext) { const user = await ctx.services.account(ctx.db).getAccountByEmail(email) if (user) { - const token = getRandomToken().toUpperCase() - const grantedAt = new Date().toISOString() - await ctx.db.db - .updateTable('user_account') - .where('did', '=', user.did) - .set({ - passwordResetToken: token, - passwordResetGrantedAt: grantedAt, - }) - .execute() + const token = await ctx.services + .account(ctx.db) + .createEmailToken(user.did, 'reset_password') await ctx.mailer.sendResetPassword( { handle: user.handle, token }, { to: user.email }, diff --git a/packages/pds/src/api/com/atproto/server/resetPassword.ts b/packages/pds/src/api/com/atproto/server/resetPassword.ts index de8d10382c0..a84b6249a3c 100644 --- a/packages/pds/src/api/com/atproto/server/resetPassword.ts +++ b/packages/pds/src/api/com/atproto/server/resetPassword.ts @@ -1,6 +1,5 @@ import AppContext from '../../../../context' import { Server } from '../../../../lexicon' -import Database from '../../../../db' import { MINUTE } from '@atproto/common' export default function (server: Server, ctx: AppContext) { @@ -14,69 +13,16 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ input }) => { const { token, password } = input.body - const tokenInfo = await ctx.db.db - .selectFrom('user_account') - .select(['did', 'passwordResetGrantedAt']) - .where('passwordResetToken', '=', token.toUpperCase()) - .executeTakeFirst() - - if (!tokenInfo?.passwordResetGrantedAt) { - return createInvalidTokenError() - } - - 
const now = new Date() - const grantedAt = new Date(tokenInfo.passwordResetGrantedAt) - const expiresAt = new Date(grantedAt.getTime() + 15 * minsToMs) - - if (now > expiresAt) { - await unsetResetToken(ctx.db, tokenInfo.did) - return createExpiredTokenError() - } + const did = await ctx.services + .account(ctx.db) + .assertValidTokenAndFindDid('reset_password', token) await ctx.db.transaction(async (dbTxn) => { - await unsetResetToken(dbTxn, tokenInfo.did) - await ctx.services - .account(dbTxn) - .updateUserPassword(tokenInfo.did, password) - await await ctx.services - .auth(dbTxn) - .revokeRefreshTokensByDid(tokenInfo.did) + const accountService = ctx.services.account(ctx.db) + await accountService.updateUserPassword(did, password) + await accountService.deleteEmailToken(did, 'reset_password') + await ctx.services.auth(dbTxn).revokeRefreshTokensByDid(did) }) }, }) } - -type ErrorResponse = { - status: number - error: string - message: string -} - -const minsToMs = 60 * 1000 - -const createInvalidTokenError = (): ErrorResponse & { - error: 'InvalidToken' -} => ({ - status: 400, - error: 'InvalidToken', - message: 'Token is invalid', -}) - -const createExpiredTokenError = (): ErrorResponse & { - error: 'ExpiredToken' -} => ({ - status: 400, - error: 'ExpiredToken', - message: 'The password reset token has expired', -}) - -const unsetResetToken = async (db: Database, did: string) => { - await db.db - .updateTable('user_account') - .where('did', '=', did) - .set({ - passwordResetToken: null, - passwordResetGrantedAt: null, - }) - .execute() -} diff --git a/packages/pds/src/api/com/atproto/server/updateEmail.ts b/packages/pds/src/api/com/atproto/server/updateEmail.ts index e5b013d8eba..1873f5e0157 100644 --- a/packages/pds/src/api/com/atproto/server/updateEmail.ts +++ b/packages/pds/src/api/com/atproto/server/updateEmail.ts @@ -1,6 +1,7 @@ import { Server } from '../../../../lexicon' import AppContext from '../../../../context' import { InvalidRequestError } from 
'@atproto/xrpc-server' +import disposable from 'disposable-email' export default function (server: Server, ctx: AppContext) { server.com.atproto.server.updateEmail({ @@ -8,22 +9,30 @@ export default function (server: Server, ctx: AppContext) { handler: async ({ auth, input }) => { const did = auth.credentials.did const { token, email } = input.body + if (!disposable.validate(email)) { + throw new InvalidRequestError( + 'This email address is not supported, please use a different email.', + ) + } const user = await ctx.services.account(ctx.db).getAccount(did) if (!user) { throw new InvalidRequestError('user not found') } + if (!user.emailConfirmedAt) { + throw new InvalidRequestError('email must be confirmed (temporary)') + } // require valid token - // @TODO re-enable updating non-verified emails - // if (user.emailConfirmedAt) { - if (!token) { - throw new InvalidRequestError( - 'confirmation token required', - 'TokenRequired', - ) + if (user.emailConfirmedAt) { + if (!token) { + throw new InvalidRequestError( + 'confirmation token required', + 'TokenRequired', + ) + } + await ctx.services + .account(ctx.db) + .assertValidToken(did, 'update_email', token) } - await ctx.services - .account(ctx.db) - .assertValidToken(did, 'update_email', token) await ctx.db.transaction(async (dbTxn) => { const accntSrvce = ctx.services.account(dbTxn) diff --git a/packages/pds/src/api/com/atproto/server/util.ts b/packages/pds/src/api/com/atproto/server/util.ts index 7182968db51..71bd1ae219c 100644 --- a/packages/pds/src/api/com/atproto/server/util.ts +++ b/packages/pds/src/api/com/atproto/server/util.ts @@ -6,7 +6,7 @@ import { ServerConfig } from '../../../../config' // ex: bsky-app-abc234-567xy // regex: bsky-app-[a-z2-7]{5}-[a-z2-7]{5} export const genInvCode = (cfg: ServerConfig): string => { - return cfg.publicHostname.replaceAll('.', '-') + '-' + getRandomToken() + return cfg.service.hostname.replaceAll('.', '-') + '-' + getRandomToken() } export const genInvCodes = (cfg: 
ServerConfig, count: number): string[] => { diff --git a/packages/pds/src/api/com/atproto/sync/getBlob.ts b/packages/pds/src/api/com/atproto/sync/getBlob.ts index f5c5c55f839..b92154af80f 100644 --- a/packages/pds/src/api/com/atproto/sync/getBlob.ts +++ b/packages/pds/src/api/com/atproto/sync/getBlob.ts @@ -4,6 +4,7 @@ import AppContext from '../../../../context' import { InvalidRequestError } from '@atproto/xrpc-server' import { notSoftDeletedClause } from '../../../../db/util' import { isUserOrAdmin } from '../../../../auth' +import { BlobNotFoundError } from '@atproto/repo' export default function (server: Server, ctx: AppContext) { server.com.atproto.sync.getBlob({ @@ -28,10 +29,18 @@ export default function (server: Server, ctx: AppContext) { ) .executeTakeFirst() if (!found) { - throw new InvalidRequestError(`blob not found: ${params.cid}`) + throw new InvalidRequestError('Blob not found') } const cid = CID.parse(params.cid) - const blobStream = await ctx.blobstore.getStream(cid) + let blobStream + try { + blobStream = await ctx.blobstore.getStream(cid) + } catch (err) { + if (err instanceof BlobNotFoundError) { + throw new InvalidRequestError('Blob not found') + } + throw err + } res.setHeader('content-length', found.size) res.setHeader('x-content-type-options', 'nosniff') res.setHeader('content-security-policy', `default-src 'none'; sandbox`) diff --git a/packages/pds/src/api/com/atproto/sync/subscribeRepos.ts b/packages/pds/src/api/com/atproto/sync/subscribeRepos.ts index d5b1bea788d..18eba6d41f0 100644 --- a/packages/pds/src/api/com/atproto/sync/subscribeRepos.ts +++ b/packages/pds/src/api/com/atproto/sync/subscribeRepos.ts @@ -8,12 +8,12 @@ export default function (server: Server, ctx: AppContext) { server.com.atproto.sync.subscribeRepos(async function* ({ params, signal }) { const { cursor } = params const outbox = new Outbox(ctx.sequencer, { - maxBufferSize: ctx.cfg.maxSubscriptionBuffer, + maxBufferSize: ctx.cfg.subscription.maxBuffer, }) 
httpLogger.info({ cursor }, 'request to com.atproto.sync.subscribeRepos') const backfillTime = new Date( - Date.now() - ctx.cfg.repoBackfillLimitMs, + Date.now() - ctx.cfg.subscription.repoBackfillLimitMs, ).toISOString() if (cursor !== undefined) { const [next, curr] = await Promise.all([ diff --git a/packages/pds/src/auth.ts b/packages/pds/src/auth.ts index 66c9b1f0780..6d75f1fd920 100644 --- a/packages/pds/src/auth.ts +++ b/packages/pds/src/auth.ts @@ -120,14 +120,14 @@ export class ServerAuth { return { status: Missing, admin: false, moderator: false, triage: false } } const { username, password } = parsed - if (username === 'admin' && password === this._triagePass) { - return { status: Valid, admin: false, moderator: false, triage: true } + if (username === 'admin' && password === this._adminPass) { + return { status: Valid, admin: true, moderator: true, triage: true } } if (username === 'admin' && password === this._moderatorPass) { return { status: Valid, admin: false, moderator: true, triage: true } } - if (username === 'admin' && password === this._adminPass) { - return { status: Valid, admin: true, moderator: true, triage: true } + if (username === 'admin' && password === this._triagePass) { + return { status: Valid, admin: false, moderator: false, triage: true } } return { status: Invalid, admin: false, moderator: false, triage: false } } diff --git a/packages/pds/src/basic-routes.ts b/packages/pds/src/basic-routes.ts index a4409dac487..aa094bea635 100644 --- a/packages/pds/src/basic-routes.ts +++ b/packages/pds/src/basic-routes.ts @@ -20,7 +20,7 @@ export const createRouter = (ctx: AppContext): express.Router => { }) router.get('/xrpc/_health', async function (req, res) { - const { version } = ctx.cfg + const { version } = ctx.cfg.service try { await sql`select 1`.execute(ctx.db.db) } catch (err) { diff --git a/packages/pds/src/bin.ts b/packages/pds/src/bin.ts deleted file mode 100644 index 7dfa946204b..00000000000 --- a/packages/pds/src/bin.ts +++ 
/dev/null @@ -1,52 +0,0 @@ -import './env' -import { ServerConfig } from './config' -import * as crypto from '@atproto/crypto' -import Database from './db' -import PDS from './index' -import { DiskBlobStore, MemoryBlobStore } from './storage' -import { BlobStore } from '@atproto/repo' - -const run = async () => { - let db: Database - - const keypair = await crypto.P256Keypair.create() - const cfg = ServerConfig.readEnv({ - serverDid: keypair.did(), - recoveryKey: keypair.did(), - }) - - if (cfg.dbPostgresUrl) { - db = Database.postgres({ - url: cfg.dbPostgresUrl, - schema: cfg.dbPostgresSchema, - }) - } else if (cfg.databaseLocation) { - db = Database.sqlite(cfg.databaseLocation) - } else { - db = Database.memory() - } - - await db.migrateToLatestOrThrow() - - let blobstore: BlobStore - if (cfg.blobstoreLocation) { - blobstore = await DiskBlobStore.create( - cfg.blobstoreLocation, - cfg.blobstoreTmp, - ) - } else { - blobstore = new MemoryBlobStore() - } - - const pds = PDS.create({ - db, - blobstore, - repoSigningKey: keypair, - plcRotationKey: keypair, - config: cfg, - }) - await pds.start() - console.log(`🌞 ATP Data server is running at ${cfg.origin}`) -} - -run() diff --git a/packages/pds/src/config.ts b/packages/pds/src/config.ts deleted file mode 100644 index f8d7b04d8fa..00000000000 --- a/packages/pds/src/config.ts +++ /dev/null @@ -1,498 +0,0 @@ -import { parseIntWithFallback, DAY, HOUR } from '@atproto/common' - -export interface ServerConfigValues { - debugMode?: boolean - version: string - - publicUrl?: string - scheme: string - port?: number - hostname: string - - dbPostgresUrl?: string - dbPostgresSchema?: string - - blobstoreLocation?: string - blobstoreTmp?: string - - jwtSecret: string - - didPlcUrl: string - didCacheStaleTTL: number - didCacheMaxTTL: number - - serverDid: string - recoveryKey: string - adminPassword: string - moderatorPassword?: string - triagePassword?: string - - inviteRequired: boolean - userInviteInterval: number | null - 
userInviteEpoch: number - privacyPolicyUrl?: string - termsOfServiceUrl?: string - - databaseLocation?: string - - availableUserDomains: string[] - handleResolveNameservers?: string[] - - rateLimitsEnabled: boolean - rateLimitBypassKey?: string - rateLimitBypassIps?: string[] - redisScratchAddress?: string - redisScratchPassword?: string - - appUrlPasswordReset: string - emailSmtpUrl?: string - emailNoReplyAddress: string - moderationEmailAddress?: string - moderationEmailSmtpUrl?: string - - maxSubscriptionBuffer: number - repoBackfillLimitMs: number - sequencerLeaderLockId?: number - sequencerLeaderEnabled?: boolean - - // this is really only used in test environments - dbTxLockNonce?: string - - bskyAppViewEndpoint: string - bskyAppViewDid: string - bskyAppViewModeration?: boolean - bskyAppViewCdnUrlPattern?: string - - crawlersToNotify?: string[] -} - -export class ServerConfig { - constructor(private cfg: ServerConfigValues) { - const invalidDomain = cfg.availableUserDomains.find( - (domain) => domain.length < 1 || !domain.startsWith('.'), - ) - if (invalidDomain) { - throw new Error(`Invalid domain: ${invalidDomain}`) - } - } - - static readEnv(overrides?: Partial) { - const debugMode = process.env.DEBUG_MODE === '1' - const version = process.env.PDS_VERSION || '0.0.0' - - const publicUrl = process.env.PUBLIC_URL || undefined - const hostname = process.env.HOSTNAME || 'localhost' - let scheme - if ('TLS' in process.env) { - scheme = process.env.TLS === '1' ? 'https' : 'http' - } else { - scheme = hostname === 'localhost' ? 
'http' : 'https' - } - const port = parseIntWithFallback(process.env.PORT, 2583) - - const jwtSecret = process.env.JWT_SECRET || 'jwt_secret' - - const didPlcUrl = process.env.DID_PLC_URL || 'http://localhost:2582' - const didCacheStaleTTL = parseIntWithFallback( - process.env.DID_CACHE_STALE_TTL, - HOUR, - ) - const didCacheMaxTTL = parseIntWithFallback( - process.env.DID_CACHE_MAX_TTL, - DAY, - ) - - const serverDid = overrides?.serverDid || process.env.SERVER_DID - if (typeof serverDid !== 'string') { - throw new Error('No value provided for process.env.SERVER_DID') - } - - const recoveryKey = overrides?.recoveryKey || process.env.RECOVERY_KEY - if (typeof recoveryKey !== 'string') { - throw new Error('No value provided for process.env.RECOVERY_KEY') - } - - const adminPassword = process.env.ADMIN_PASSWORD || 'admin' - const moderatorPassword = process.env.MODERATOR_PASSWORD || undefined - const triagePassword = process.env.TRIAGE_PASSWORD || undefined - - const inviteRequired = process.env.INVITE_REQUIRED === 'true' ? true : false - const userInviteInterval = parseIntWithFallback( - process.env.USER_INVITE_INTERVAL, - null, - ) - const userInviteEpoch = parseIntWithFallback( - process.env.USER_INVITE_EPOCH, - 0, - ) - - const privacyPolicyUrl = process.env.PRIVACY_POLICY_URL - const termsOfServiceUrl = process.env.TERMS_OF_SERVICE_URL - - const databaseLocation = process.env.DATABASE_LOC - - const blobstoreLocation = process.env.BLOBSTORE_LOC - const blobstoreTmp = process.env.BLOBSTORE_TMP - - const availableUserDomains = process.env.AVAILABLE_USER_DOMAINS - ? process.env.AVAILABLE_USER_DOMAINS.split(',') - : [] - - const handleResolveNameservers = process.env.HANDLE_RESOLVE_NAMESERVERS - ? 
process.env.HANDLE_RESOLVE_NAMESERVERS.split(',') - : [] - - const rateLimitsEnabled = process.env.RATE_LIMITS_ENABLED === 'true' - const rateLimitBypassKey = nonemptyString(process.env.RATE_LIMIT_BYPASS_KEY) - const rateLimitBypassIpsStr = nonemptyString( - process.env.RATE_LIMIT_BYPASS_IPS, - ) - const rateLimitBypassIps = rateLimitBypassIpsStr - ? rateLimitBypassIpsStr.split(',').map((ipOrCidr) => { - const ip = ipOrCidr.split('/')[0] - return ip.trim() - }) - : undefined - const redisScratchAddress = nonemptyString( - process.env.REDIS_SCRATCH_ADDRESS, - ) - const redisScratchPassword = nonemptyString( - process.env.REDIS_SCRATCH_PASSWORD, - ) - - const appUrlPasswordReset = - process.env.APP_URL_PASSWORD_RESET || 'app://password-reset' - - const emailSmtpUrl = process.env.EMAIL_SMTP_URL || undefined - - const emailNoReplyAddress = - process.env.EMAIL_NO_REPLY_ADDRESS || 'noreply@blueskyweb.xyz' - - const moderationEmailAddress = - process.env.MODERATION_EMAIL_ADDRESS || undefined - const moderationEmailSmtpUrl = - process.env.MODERATION_EMAIL_SMTP_URL || undefined - - const dbPostgresUrl = process.env.DB_POSTGRES_URL - const dbPostgresSchema = process.env.DB_POSTGRES_SCHEMA - - const maxSubscriptionBuffer = parseIntWithFallback( - process.env.MAX_SUBSCRIPTION_BUFFER, - 500, - ) - - const repoBackfillLimitMs = parseIntWithFallback( - process.env.REPO_BACKFILL_LIMIT_MS, - DAY, - ) - - const sequencerLeaderLockId = parseIntWithFallback( - process.env.SEQUENCER_LEADER_LOCK_ID, - undefined, - ) - - // by default each instance is a potential sequencer leader, but may be configured off - const sequencerLeaderEnabled = process.env.SEQUENCER_LEADER_ENABLED - ? 
process.env.SEQUENCER_LEADER_ENABLED !== '0' && - process.env.SEQUENCER_LEADER_ENABLED !== 'false' - : undefined - - const dbTxLockNonce = nonemptyString(process.env.DB_TX_LOCK_NONCE) - - const bskyAppViewEndpoint = nonemptyString( - process.env.BSKY_APP_VIEW_ENDPOINT, - ) - if (typeof bskyAppViewEndpoint !== 'string') { - throw new Error( - 'No value provided for process.env.BSKY_APP_VIEW_ENDPOINT', - ) - } - const bskyAppViewDid = nonemptyString(process.env.BSKY_APP_VIEW_DID) - if (typeof bskyAppViewDid !== 'string') { - throw new Error('No value provided for process.env.BSKY_APP_VIEW_DID') - } - const bskyAppViewModeration = - process.env.BSKY_APP_VIEW_MODERATION === 'true' ? true : false - const bskyAppViewCdnUrlPattern = nonemptyString( - process.env.BSKY_APP_VIEW_CDN_URL_PATTERN, - ) - - const crawlersEnv = process.env.CRAWLERS_TO_NOTIFY - const crawlersToNotify = - crawlersEnv && crawlersEnv.length > 0 ? crawlersEnv.split(',') : [] - - return new ServerConfig({ - debugMode, - version, - publicUrl, - scheme, - hostname, - port, - dbPostgresUrl, - dbPostgresSchema, - blobstoreLocation, - blobstoreTmp, - jwtSecret, - recoveryKey, - didPlcUrl, - didCacheStaleTTL, - didCacheMaxTTL, - serverDid, - adminPassword, - moderatorPassword, - triagePassword, - inviteRequired, - userInviteInterval, - userInviteEpoch, - privacyPolicyUrl, - termsOfServiceUrl, - databaseLocation, - availableUserDomains, - handleResolveNameservers, - rateLimitsEnabled, - rateLimitBypassKey, - rateLimitBypassIps, - redisScratchAddress, - redisScratchPassword, - appUrlPasswordReset, - emailSmtpUrl, - emailNoReplyAddress, - moderationEmailAddress, - moderationEmailSmtpUrl, - maxSubscriptionBuffer, - repoBackfillLimitMs, - sequencerLeaderLockId, - sequencerLeaderEnabled, - dbTxLockNonce, - bskyAppViewEndpoint, - bskyAppViewDid, - bskyAppViewModeration, - bskyAppViewCdnUrlPattern, - crawlersToNotify, - ...overrides, - }) - } - - get debugMode() { - return !!this.cfg.debugMode - } - - get version() 
{ - return this.cfg.version - } - - get scheme() { - return this.cfg.scheme - } - - get port() { - return this.cfg.port - } - - get hostname() { - return this.cfg.hostname - } - - get internalUrl() { - return `${this.scheme}://${this.hostname}:${this.port}` - } - - get origin() { - const u = new URL(this.internalUrl) - return u.origin - } - - get publicUrl() { - return this.cfg.publicUrl || this.internalUrl - } - - get publicHostname() { - const u = new URL(this.publicUrl) - return u.hostname - } - - get dbPostgresUrl() { - return this.cfg.dbPostgresUrl - } - - get dbPostgresSchema() { - return this.cfg.dbPostgresSchema - } - - get blobstoreLocation() { - return this.cfg.blobstoreLocation - } - - get blobstoreTmp() { - return this.cfg.blobstoreTmp - } - - get jwtSecret() { - return this.cfg.jwtSecret - } - - get didPlcUrl() { - return this.cfg.didPlcUrl - } - - get didCacheStaleTTL() { - return this.cfg.didCacheStaleTTL - } - - get didCacheMaxTTL() { - return this.cfg.didCacheMaxTTL - } - - get serverDid() { - return this.cfg.serverDid - } - - get recoveryKey() { - return this.cfg.recoveryKey - } - - get adminPassword() { - return this.cfg.adminPassword - } - - get moderatorPassword() { - return this.cfg.moderatorPassword - } - - get triagePassword() { - return this.cfg.triagePassword - } - - get inviteRequired() { - return this.cfg.inviteRequired - } - - get userInviteInterval() { - return this.cfg.userInviteInterval - } - - get userInviteEpoch() { - return this.cfg.userInviteEpoch - } - - get privacyPolicyUrl() { - if ( - this.cfg.privacyPolicyUrl && - this.cfg.privacyPolicyUrl.startsWith('/') - ) { - return this.publicUrl + this.cfg.privacyPolicyUrl - } - return this.cfg.privacyPolicyUrl - } - - get termsOfServiceUrl() { - if ( - this.cfg.termsOfServiceUrl && - this.cfg.termsOfServiceUrl.startsWith('/') - ) { - return this.publicUrl + this.cfg.termsOfServiceUrl - } - return this.cfg.termsOfServiceUrl - } - - get databaseLocation() { - return 
this.cfg.databaseLocation - } - - get useMemoryDatabase() { - return !this.databaseLocation - } - - get availableUserDomains() { - return this.cfg.availableUserDomains - } - - get handleResolveNameservers() { - return this.cfg.handleResolveNameservers - } - - get rateLimitsEnabled() { - return this.cfg.rateLimitsEnabled - } - - get rateLimitBypassKey() { - return this.cfg.rateLimitBypassKey - } - - get rateLimitBypassIps() { - return this.cfg.rateLimitBypassIps - } - - get redisScratchAddress() { - return this.cfg.redisScratchAddress - } - - get redisScratchPassword() { - return this.cfg.redisScratchPassword - } - - get appUrlPasswordReset() { - return this.cfg.appUrlPasswordReset - } - - get emailSmtpUrl() { - return this.cfg.emailSmtpUrl - } - - get emailNoReplyAddress() { - return this.cfg.emailNoReplyAddress - } - - get moderationEmailAddress() { - return this.cfg.moderationEmailAddress - } - - get moderationEmailSmtpUrl() { - return this.cfg.moderationEmailSmtpUrl - } - - get maxSubscriptionBuffer() { - return this.cfg.maxSubscriptionBuffer - } - - get repoBackfillLimitMs() { - return this.cfg.repoBackfillLimitMs - } - - get sequencerLeaderLockId() { - return this.cfg.sequencerLeaderLockId - } - - get sequencerLeaderEnabled() { - return this.cfg.sequencerLeaderEnabled !== false - } - - get dbTxLockNonce() { - return this.cfg.dbTxLockNonce - } - - get bskyAppViewEndpoint() { - return this.cfg.bskyAppViewEndpoint - } - - get bskyAppViewDid() { - return this.cfg.bskyAppViewDid - } - - get bskyAppViewModeration() { - return this.cfg.bskyAppViewModeration - } - - get bskyAppViewCdnUrlPattern() { - return this.cfg.bskyAppViewCdnUrlPattern - } - - get crawlersToNotify() { - return this.cfg.crawlersToNotify - } -} - -const nonemptyString = (str: string | undefined): string | undefined => { - if (str === undefined || str.length === 0) return undefined - return str -} diff --git a/packages/pds/src/config/config.ts b/packages/pds/src/config/config.ts new file mode 100644 
index 00000000000..68d043e6431 --- /dev/null +++ b/packages/pds/src/config/config.ts @@ -0,0 +1,299 @@ +import os from 'node:os' +import path from 'node:path' +import { DAY, HOUR, SECOND } from '@atproto/common' +import { ServerEnvironment } from './env' + +// off-config but still from env: +// logging: LOG_LEVEL, LOG_SYSTEMS, LOG_ENABLED, LOG_DESTINATION + +export const envToCfg = (env: ServerEnvironment): ServerConfig => { + const port = env.port ?? 2583 + const hostname = env.hostname ?? 'localhost' + const publicUrl = + hostname === 'localhost' + ? `http://localhost:${port}` + : `https://${hostname}` + const did = env.serviceDid ?? `did:web:${hostname}` + const serviceCfg: ServerConfig['service'] = { + port, + hostname, + publicUrl, + did, + version: env.version, // default? + privacyPolicyUrl: env.privacyPolicyUrl, + termsOfServiceUrl: env.termsOfServiceUrl, + } + + let dbCfg: ServerConfig['db'] + if (env.dbSqliteLocation && env.dbPostgresUrl) { + throw new Error('Cannot set both sqlite & postgres db env vars') + } + if (env.dbSqliteLocation) { + dbCfg = { + dialect: 'sqlite', + location: env.dbSqliteLocation, + } + } else if (env.dbPostgresUrl) { + dbCfg = { + dialect: 'pg', + url: env.dbPostgresUrl, + migrationUrl: env.dbPostgresMigrationUrl ?? env.dbPostgresUrl, + schema: env.dbPostgresSchema, + pool: { + idleTimeoutMs: env.dbPostgresPoolIdleTimeoutMs ?? 10000, + maxUses: env.dbPostgresPoolMaxUses ?? Infinity, + size: env.dbPostgresPoolSize ?? 
10, + }, + } + } else { + throw new Error('Must configure either sqlite or postgres db') + } + + let blobstoreCfg: ServerConfig['blobstore'] + if (env.blobstoreS3Bucket && env.blobstoreDiskLocation) { + throw new Error('Cannot set both S3 and disk blobstore env vars') + } + if (env.blobstoreS3Bucket) { + blobstoreCfg = { provider: 's3', bucket: env.blobstoreS3Bucket } + } else if (env.blobstoreDiskLocation) { + blobstoreCfg = { + provider: 'disk', + location: env.blobstoreDiskLocation, + tempLocation: + env.blobstoreDiskTmpLocation ?? path.join(os.tmpdir(), 'pds/blobs'), + } + } else { + throw new Error('Must configure either S3 or disk blobstore') + } + + let serviceHandleDomains: string[] + if (env.serviceHandleDomains && env.serviceHandleDomains.length > 0) { + serviceHandleDomains = env.serviceHandleDomains + } else { + if (hostname === 'localhost') { + serviceHandleDomains = ['.test'] + } else { + serviceHandleDomains = [`.${hostname}`] + } + } + const invalidDomain = serviceHandleDomains.find( + (domain) => domain.length < 1 || !domain.startsWith('.'), + ) + if (invalidDomain) { + throw new Error(`Invalid handle domain: ${invalidDomain}`) + } + + const identityCfg: ServerConfig['identity'] = { + plcUrl: env.didPlcUrl ?? 'https://plc.directory', + cacheMaxTTL: env.didCacheMaxTTL ?? DAY, + cacheStaleTTL: env.didCacheStaleTTL ?? HOUR, + resolverTimeout: env.resolverTimeout ?? 3 * SECOND, + recoveryDidKey: env.recoveryDidKey ?? null, + serviceHandleDomains, + handleBackupNameservers: env.handleBackupNameservers, + } + + // default to being required if left undefined + const invitesCfg: ServerConfig['invites'] = + env.inviteRequired === false + ? { + required: false, + } + : { + required: true, + interval: env.inviteInterval ?? null, + epoch: env.inviteEpoch ?? 
0, + } + + let emailCfg: ServerConfig['email'] + if (!env.emailFromAddress && !env.emailSmtpUrl) { + emailCfg = null + } else { + if (!env.emailFromAddress || !env.emailSmtpUrl) { + throw new Error( + 'Partial email config, must set both emailFromAddress and emailSmtpUrl', + ) + } + emailCfg = { + smtpUrl: env.emailSmtpUrl, + fromAddress: env.emailFromAddress, + } + } + + let moderationEmailCfg: ServerConfig['moderationEmail'] + if (!env.moderationEmailAddress && !env.moderationEmailSmtpUrl) { + moderationEmailCfg = null + } else { + if (!env.moderationEmailAddress || !env.moderationEmailSmtpUrl) { + throw new Error( + 'Partial moderation email config, must set both moderationEmailAddress and moderationEmailSmtpUrl', + ) + } + moderationEmailCfg = { + smtpUrl: env.moderationEmailSmtpUrl, + fromAddress: env.moderationEmailAddress, + } + } + + const subscriptionCfg: ServerConfig['subscription'] = { + maxBuffer: env.maxSubscriptionBuffer ?? 500, + repoBackfillLimitMs: env.repoBackfillLimitMs ?? DAY, + sequencerLeaderEnabled: env.sequencerLeaderEnabled ?? true, + sequencerLeaderLockId: env.sequencerLeaderLockId ?? 1100, + } + + if (!env.bskyAppViewUrl) { + throw new Error('Must configure PDS_BSKY_APP_VIEW_URL') + } else if (!env.bskyAppViewDid) { + throw new Error('Must configure PDS_BSKY_APP_VIEW_DID') + } + const bskyAppViewCfg: ServerConfig['bskyAppView'] = { + url: env.bskyAppViewUrl, + did: env.bskyAppViewDid, + proxyModeration: env.bskyAppViewModeration ?? false, + cdnUrlPattern: env.bskyAppViewCdnUrlPattern, + } + + const redisCfg: ServerConfig['redis'] = env.redisScratchAddress + ? { + address: env.redisScratchAddress, + password: env.redisScratchPassword, + } + : null + + const rateLimitsCfg: ServerConfig['rateLimits'] = env.rateLimitsEnabled + ? { + enabled: true, + mode: redisCfg !== null ?
'redis' : 'memory', + bypassKey: env.rateLimitBypassKey, + bypassIps: env.rateLimitBypassIps?.map((ipOrCidr) => + ipOrCidr.split('/')[0]?.trim(), + ), + } + : { enabled: false } + + const crawlersCfg: ServerConfig['crawlers'] = env.crawlers ?? [] + + return { + service: serviceCfg, + db: dbCfg, + blobstore: blobstoreCfg, + identity: identityCfg, + invites: invitesCfg, + email: emailCfg, + moderationEmail: moderationEmailCfg, + subscription: subscriptionCfg, + bskyAppView: bskyAppViewCfg, + redis: redisCfg, + rateLimits: rateLimitsCfg, + crawlers: crawlersCfg, + } +} + +export type ServerConfig = { + service: ServiceConfig + db: SqliteConfig | PostgresConfig + blobstore: S3BlobstoreConfig | DiskBlobstoreConfig + identity: IdentityConfig + invites: InvitesConfig + email: EmailConfig | null + moderationEmail: EmailConfig | null + subscription: SubscriptionConfig + bskyAppView: BksyAppViewConfig + redis: RedisScratchConfig | null + rateLimits: RateLimitsConfig + crawlers: string[] +} + +export type ServiceConfig = { + port: number + hostname: string + publicUrl: string + did: string + version?: string + privacyPolicyUrl?: string + termsOfServiceUrl?: string +} + +export type SqliteConfig = { + dialect: 'sqlite' + location: string +} + +export type PostgresPoolConfig = { + size: number + maxUses: number + idleTimeoutMs: number +} + +export type PostgresConfig = { + dialect: 'pg' + url: string + migrationUrl: string + pool: PostgresPoolConfig + schema?: string +} + +export type S3BlobstoreConfig = { + provider: 's3' + bucket: string +} + +export type DiskBlobstoreConfig = { + provider: 'disk' + location: string + tempLocation: string +} + +export type IdentityConfig = { + plcUrl: string + resolverTimeout: number + cacheStaleTTL: number + cacheMaxTTL: number + recoveryDidKey: string | null + serviceHandleDomains: string[] + handleBackupNameservers?: string[] +} + +export type InvitesConfig = + | { + required: true + interval: number | null + epoch: number + } + | { + 
required: false + } + +export type EmailConfig = { + smtpUrl: string + fromAddress: string +} + +export type SubscriptionConfig = { + maxBuffer: number + repoBackfillLimitMs: number + sequencerLeaderEnabled: boolean + sequencerLeaderLockId: number +} + +export type RedisScratchConfig = { + address: string + password?: string +} + +export type RateLimitsConfig = + | { + enabled: true + mode: 'memory' | 'redis' + bypassKey?: string + bypassIps?: string[] + } + | { enabled: false } + +export type BksyAppViewConfig = { + url: string + did: string + proxyModeration: boolean + cdnUrlPattern?: string +} diff --git a/packages/pds/src/config/env.ts b/packages/pds/src/config/env.ts new file mode 100644 index 00000000000..170e26d5976 --- /dev/null +++ b/packages/pds/src/config/env.ts @@ -0,0 +1,175 @@ +import { envInt, envStr, envBool, envList } from './util' + +export const readEnv = (): ServerEnvironment => { + return { + // service + port: envInt('PDS_PORT'), + hostname: envStr('PDS_HOSTNAME'), + serviceDid: envStr('PDS_SERVICE_DID'), + version: envStr('PDS_VERSION'), + privacyPolicyUrl: envStr('PDS_PRIVACY_POLICY_URL'), + termsOfServiceUrl: envStr('PDS_TERMS_OF_SERVICE_URL'), + + // db: one required + // sqlite + dbSqliteLocation: envStr('PDS_DB_SQLITE_LOCATION'), + // postgres + dbPostgresUrl: envStr('PDS_DB_POSTGRES_URL'), + dbPostgresMigrationUrl: envStr('PDS_DB_POSTGRES_MIGRATION_URL'), + dbPostgresSchema: envStr('PDS_DB_POSTGRES_SCHEMA'), + dbPostgresPoolSize: envInt('PDS_DB_POSTGRES_POOL_SIZE'), + dbPostgresPoolMaxUses: envInt('PDS_DB_POSTGRES_POOL_MAX_USES'), + dbPostgresPoolIdleTimeoutMs: envInt('PDS_DB_POSTGRES_POOL_IDLE_TIMEOUT_MS'), + + // blobstore: one required + // s3 + blobstoreS3Bucket: envStr('PDS_BLOBSTORE_S3_BUCKET'), + // disk + blobstoreDiskLocation: envStr('PDS_BLOBSTORE_DISK_LOCATION'), + blobstoreDiskTmpLocation: envStr('PDS_BLOBSTORE_DISK_TMP_LOCATION'), + + // identity + didPlcUrl: envStr('PDS_DID_PLC_URL'), + didCacheStaleTTL: 
envInt('PDS_DID_CACHE_STALE_TTL'), + didCacheMaxTTL: envInt('PDS_DID_CACHE_MAX_TTL'), + resolverTimeout: envInt('PDS_ID_RESOLVER_TIMEOUT'), + recoveryDidKey: envStr('PDS_RECOVERY_DID_KEY'), + serviceHandleDomains: envList('PDS_SERVICE_HANDLE_DOMAINS'), + handleBackupNameservers: envList('PDS_HANDLE_BACKUP_NAMESERVERS'), + + // invites + inviteRequired: envBool('PDS_INVITE_REQUIRED'), + inviteInterval: envInt('PDS_INVITE_INTERVAL'), + inviteEpoch: envInt('PDS_INVITE_EPOCH'), + + // email + emailSmtpUrl: envStr('PDS_EMAIL_SMTP_URL'), + emailFromAddress: envStr('PDS_EMAIL_FROM_ADDRESS'), + moderationEmailSmtpUrl: envStr('PDS_MODERATION_EMAIL_SMTP_URL'), + moderationEmailAddress: envStr('PDS_MODERATION_EMAIL_ADDRESS'), + + // subscription + maxSubscriptionBuffer: envInt('PDS_MAX_SUBSCRIPTION_BUFFER'), + repoBackfillLimitMs: envInt('PDS_REPO_BACKFILL_LIMIT_MS'), + sequencerLeaderEnabled: envBool('PDS_SEQUENCER_LEADER_ENABLED'), + sequencerLeaderLockId: envInt('PDS_SEQUENCER_LEADER_LOCK_ID'), + + // appview + bskyAppViewUrl: envStr('PDS_BSKY_APP_VIEW_URL'), + bskyAppViewDid: envStr('PDS_BSKY_APP_VIEW_DID'), + bskyAppViewModeration: envBool('PDS_BSKY_APP_VIEW_MODERATION'), + bskyAppViewCdnUrlPattern: envStr('PDS_BSKY_APP_VIEW_CDN_URL_PATTERN'), + + // rate limits + rateLimitsEnabled: envBool('PDS_RATE_LIMITS_ENABLED'), + rateLimitBypassKey: envStr('PDS_RATE_LIMIT_BYPASS_KEY'), + rateLimitBypassIps: envList('PDS_RATE_LIMIT_BYPASS_IPS'), + + // redis + redisScratchAddress: envStr('PDS_REDIS_SCRATCH_ADDRESS'), + redisScratchPassword: envStr('PDS_REDIS_SCRATCH_PASSWORD'), + + // crawlers + crawlers: envList('PDS_CRAWLERS'), + + // secrets + jwtSecret: envStr('PDS_JWT_SECRET'), + adminPassword: envStr('PDS_ADMIN_PASSWORD'), + moderatorPassword: envStr('PDS_MODERATOR_PASSWORD'), + triagePassword: envStr('PDS_TRIAGE_PASSWORD'), + + // keys: only one of each required + // kms + repoSigningKeyKmsKeyId: envStr('PDS_REPO_SIGNING_KEY_KMS_KEY_ID'), + // memory + 
repoSigningKeyK256PrivateKeyHex: envStr( + 'PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX', + ), + // kms + plcRotationKeyKmsKeyId: envStr('PDS_PLC_ROTATION_KEY_KMS_KEY_ID'), + // memory + plcRotationKeyK256PrivateKeyHex: envStr( + 'PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX', + ), + } +} + +export type ServerEnvironment = { + // service + port?: number + hostname?: string + serviceDid?: string + version?: string + privacyPolicyUrl?: string + termsOfServiceUrl?: string + + // db: one required + dbSqliteLocation?: string + dbPostgresUrl?: string + dbPostgresMigrationUrl?: string + dbPostgresSchema?: string + dbPostgresPoolSize?: number + dbPostgresPoolMaxUses?: number + dbPostgresPoolIdleTimeoutMs?: number + + // blobstore: one required + blobstoreS3Bucket?: string + blobstoreDiskLocation?: string + blobstoreDiskTmpLocation?: string + + // identity + didPlcUrl?: string + didCacheStaleTTL?: number + didCacheMaxTTL?: number + resolverTimeout?: number + recoveryDidKey?: string + serviceHandleDomains?: string[] // public hostname by default + handleBackupNameservers?: string[] + + // invites + inviteRequired?: boolean + inviteInterval?: number + inviteEpoch?: number + + // email + emailSmtpUrl?: string + emailFromAddress?: string + moderationEmailSmtpUrl?: string + moderationEmailAddress?: string + + // subscription + maxSubscriptionBuffer?: number + repoBackfillLimitMs?: number + sequencerLeaderEnabled?: boolean + sequencerLeaderLockId?: number + + // appview + bskyAppViewUrl?: string + bskyAppViewDid?: string + bskyAppViewModeration?: boolean + bskyAppViewCdnUrlPattern?: string + + // rate limits + rateLimitsEnabled?: boolean + rateLimitBypassKey?: string + rateLimitBypassIps?: string[] + + // redis + redisScratchAddress?: string + redisScratchPassword?: string + + // crawler + crawlers?: string[] + + // secrets + jwtSecret?: string + adminPassword?: string + moderatorPassword?: string + triagePassword?: string + + // keys + repoSigningKeyKmsKeyId?: string + 
repoSigningKeyK256PrivateKeyHex?: string + plcRotationKeyKmsKeyId?: string + plcRotationKeyK256PrivateKeyHex?: string +} diff --git a/packages/pds/src/config/index.ts b/packages/pds/src/config/index.ts new file mode 100644 index 00000000000..cd02efb6c87 --- /dev/null +++ b/packages/pds/src/config/index.ts @@ -0,0 +1,3 @@ +export * from './config' +export * from './env' +export * from './secrets' diff --git a/packages/pds/src/config/secrets.ts b/packages/pds/src/config/secrets.ts new file mode 100644 index 00000000000..f0f876f1ccc --- /dev/null +++ b/packages/pds/src/config/secrets.ts @@ -0,0 +1,74 @@ +import { ServerEnvironment } from './env' + +export const envToSecrets = (env: ServerEnvironment): ServerSecrets => { + let repoSigningKey: ServerSecrets['repoSigningKey'] + if (env.repoSigningKeyKmsKeyId && env.repoSigningKeyK256PrivateKeyHex) { + throw new Error('Cannot set both kms & memory keys for repo signing key') + } else if (env.repoSigningKeyKmsKeyId) { + repoSigningKey = { + provider: 'kms', + keyId: env.repoSigningKeyKmsKeyId, + } + } else if (env.repoSigningKeyK256PrivateKeyHex) { + repoSigningKey = { + provider: 'memory', + privateKeyHex: env.repoSigningKeyK256PrivateKeyHex, + } + } else { + throw new Error('Must configure repo signing key') + } + + let plcRotationKey: ServerSecrets['plcRotationKey'] + if (env.plcRotationKeyKmsKeyId && env.plcRotationKeyK256PrivateKeyHex) { + throw new Error('Cannot set both kms & memory keys for plc rotation key') + } else if (env.plcRotationKeyKmsKeyId) { + plcRotationKey = { + provider: 'kms', + keyId: env.plcRotationKeyKmsKeyId, + } + } else if (env.plcRotationKeyK256PrivateKeyHex) { + plcRotationKey = { + provider: 'memory', + privateKeyHex: env.plcRotationKeyK256PrivateKeyHex, + } + } else { + throw new Error('Must configure plc rotation key') + } + + if (!env.jwtSecret) { + throw new Error('Must provide a JWT secret') + } + + if (!env.adminPassword) { + throw new Error('Must provide an admin password') + } + + 
return { + jwtSecret: env.jwtSecret, + adminPassword: env.adminPassword, + moderatorPassword: env.moderatorPassword ?? env.adminPassword, + triagePassword: + env.triagePassword ?? env.moderatorPassword ?? env.adminPassword, + repoSigningKey, + plcRotationKey, + } +} + +export type ServerSecrets = { + jwtSecret: string + adminPassword: string + moderatorPassword: string + triagePassword: string + repoSigningKey: SigningKeyKms | SigningKeyMemory + plcRotationKey: SigningKeyKms | SigningKeyMemory +} + +export type SigningKeyKms = { + provider: 'kms' + keyId: string +} + +export type SigningKeyMemory = { + provider: 'memory' + privateKeyHex: string +} diff --git a/packages/pds/src/config/util.ts b/packages/pds/src/config/util.ts new file mode 100644 index 00000000000..2bf858621bf --- /dev/null +++ b/packages/pds/src/config/util.ts @@ -0,0 +1,25 @@ +import { parseIntWithFallback } from '@atproto/common' + +export const envInt = (name: string): number | undefined => { + const str = process.env[name] + return parseIntWithFallback(str, undefined) +} + +export const envStr = (name: string): string | undefined => { + const str = process.env[name] + if (str === undefined || str.length === 0) return undefined + return str +} + +export const envBool = (name: string): boolean | undefined => { + const str = process.env[name] + if (str === 'true' || str === '1') return true + if (str === 'false' || str === '0') return false + return undefined +} + +export const envList = (name: string): string[] => { + const str = process.env[name] + if (str === undefined || str.length === 0) return [] + return str.split(',') +} diff --git a/packages/pds/src/context.ts b/packages/pds/src/context.ts index 791100c492b..328b61893a1 100644 --- a/packages/pds/src/context.ts +++ b/packages/pds/src/context.ts @@ -1,68 +1,215 @@ +import * as nodemailer from 'nodemailer' import { Redis } from 'ioredis' import * as plc from '@did-plc/lib' import * as crypto from '@atproto/crypto' import { IdResolver } from 
'@atproto/identity' import { AtpAgent } from '@atproto/api' +import { KmsKeypair, S3BlobStore } from '@atproto/aws' import { createServiceAuthHeaders } from '@atproto/xrpc-server' import { Database } from './db' -import { ServerConfig } from './config' +import { ServerConfig, ServerSecrets } from './config' import * as auth from './auth' +import { ServerAuth } from './auth' import { ServerMailer } from './mailer' import { ModerationMailer } from './mailer/moderation' import { BlobStore } from '@atproto/repo' -import { Services } from './services' +import { Services, createServices } from './services' import { Sequencer, SequencerLeader } from './sequencer' import { BackgroundQueue } from './background' import DidSqlCache from './did-cache' import { Crawlers } from './crawlers' +import { DiskBlobStore } from './storage' +import { getRedisClient } from './redis' import { RuntimeFlags } from './runtime-flags' +export type AppContextOptions = { + db: Database + blobstore: BlobStore + mailer: ServerMailer + moderationMailer: ModerationMailer + didCache: DidSqlCache + idResolver: IdResolver + plcClient: plc.Client + services: Services + sequencer: Sequencer + sequencerLeader?: SequencerLeader + backgroundQueue: BackgroundQueue + runtimeFlags: RuntimeFlags + redisScratch?: Redis + crawlers: Crawlers + appViewAgent: AtpAgent + auth: auth.ServerAuth + repoSigningKey: crypto.Keypair + plcRotationKey: crypto.Keypair + cfg: ServerConfig +} + export class AppContext { - constructor( - private opts: { - db: Database - blobstore: BlobStore - redisScratch?: Redis - repoSigningKey: crypto.Keypair - plcRotationKey: crypto.Keypair - idResolver: IdResolver - didCache: DidSqlCache - auth: auth.ServerAuth - cfg: ServerConfig - mailer: ServerMailer - moderationMailer: ModerationMailer - services: Services - sequencer: Sequencer - sequencerLeader: SequencerLeader | null - runtimeFlags: RuntimeFlags - backgroundQueue: BackgroundQueue - appviewAgent: AtpAgent - crawlers: Crawlers - }, - ) 
{} - - get db(): Database { - return this.opts.db - } + public db: Database + public blobstore: BlobStore + public mailer: ServerMailer + public moderationMailer: ModerationMailer + public didCache: DidSqlCache + public idResolver: IdResolver + public plcClient: plc.Client + public services: Services + public sequencer: Sequencer + public sequencerLeader?: SequencerLeader + public backgroundQueue: BackgroundQueue + public runtimeFlags: RuntimeFlags + public redisScratch?: Redis + public crawlers: Crawlers + public appViewAgent: AtpAgent + public auth: auth.ServerAuth + public repoSigningKey: crypto.Keypair + public plcRotationKey: crypto.Keypair + public cfg: ServerConfig - get blobstore(): BlobStore { - return this.opts.blobstore + constructor(opts: AppContextOptions) { + this.db = opts.db + this.blobstore = opts.blobstore + this.mailer = opts.mailer + this.moderationMailer = opts.moderationMailer + this.didCache = opts.didCache + this.idResolver = opts.idResolver + this.plcClient = opts.plcClient + this.services = opts.services + this.sequencer = opts.sequencer + this.sequencerLeader = opts.sequencerLeader + this.backgroundQueue = opts.backgroundQueue + this.runtimeFlags = opts.runtimeFlags + this.redisScratch = opts.redisScratch + this.crawlers = opts.crawlers + this.appViewAgent = opts.appViewAgent + this.auth = opts.auth + this.repoSigningKey = opts.repoSigningKey + this.plcRotationKey = opts.plcRotationKey + this.cfg = opts.cfg } - get redisScratch(): Redis | undefined { - return this.opts.redisScratch - } + static async fromConfig( + cfg: ServerConfig, + secrets: ServerSecrets, + overrides?: Partial, + ): Promise { + const db = + cfg.db.dialect === 'sqlite' + ? Database.sqlite(cfg.db.location) + : Database.postgres({ + url: cfg.db.url, + schema: cfg.db.schema, + poolSize: cfg.db.pool.size, + poolMaxUses: cfg.db.pool.maxUses, + poolIdleTimeoutMs: cfg.db.pool.idleTimeoutMs, + }) + const blobstore = + cfg.blobstore.provider === 's3' + ? 
new S3BlobStore({ bucket: cfg.blobstore.bucket }) + : await DiskBlobStore.create( + cfg.blobstore.location, + cfg.blobstore.tempLocation, + ) - get repoSigningKey(): crypto.Keypair { - return this.opts.repoSigningKey - } + const mailTransport = + cfg.email !== null + ? nodemailer.createTransport(cfg.email.smtpUrl) + : nodemailer.createTransport({ jsonTransport: true }) - get plcRotationKey(): crypto.Keypair { - return this.opts.plcRotationKey - } + const mailer = new ServerMailer(mailTransport, cfg) + + const modMailTransport = + cfg.moderationEmail !== null + ? nodemailer.createTransport(cfg.moderationEmail.smtpUrl) + : nodemailer.createTransport({ jsonTransport: true }) + + const moderationMailer = new ModerationMailer(modMailTransport, cfg) + + const didCache = new DidSqlCache( + db, + cfg.identity.cacheStaleTTL, + cfg.identity.cacheMaxTTL, + ) + const idResolver = new IdResolver({ + plcUrl: cfg.identity.plcUrl, + didCache, + timeout: cfg.identity.resolverTimeout, + backupNameservers: cfg.identity.handleBackupNameservers, + }) + const plcClient = new plc.Client(cfg.identity.plcUrl) + + const sequencer = new Sequencer(db) + const sequencerLeader = cfg.subscription.sequencerLeaderEnabled + ? new SequencerLeader(db, cfg.subscription.sequencerLeaderLockId) + : undefined + + const backgroundQueue = new BackgroundQueue(db) + const runtimeFlags = new RuntimeFlags(db) + const redisScratch = cfg.redis + ? getRedisClient(cfg.redis.address, cfg.redis.password) + : undefined + + const crawlers = new Crawlers(cfg.service.hostname, cfg.crawlers) - get auth(): auth.ServerAuth { - return this.opts.auth + const appViewAgent = new AtpAgent({ service: cfg.bskyAppView.url }) + + const auth = new ServerAuth({ + jwtSecret: secrets.jwtSecret, + adminPass: secrets.adminPassword, + moderatorPass: secrets.moderatorPassword, + triagePass: secrets.triagePassword, + }) + + const repoSigningKey = + secrets.repoSigningKey.provider === 'kms' + ? 
await KmsKeypair.load({ + keyId: secrets.repoSigningKey.keyId, + }) + : await crypto.Secp256k1Keypair.import( + secrets.repoSigningKey.privateKeyHex, + ) + + const plcRotationKey = + secrets.plcRotationKey.provider === 'kms' + ? await KmsKeypair.load({ + keyId: secrets.plcRotationKey.keyId, + }) + : await crypto.Secp256k1Keypair.import( + secrets.plcRotationKey.privateKeyHex, + ) + + const services = createServices({ + repoSigningKey, + blobstore, + appViewAgent, + pdsHostname: cfg.service.hostname, + appViewDid: cfg.bskyAppView.did, + appViewCdnUrlPattern: cfg.bskyAppView.cdnUrlPattern, + backgroundQueue, + crawlers, + }) + + return new AppContext({ + db, + blobstore, + mailer, + moderationMailer, + didCache, + idResolver, + plcClient, + services, + sequencer, + sequencerLeader, + backgroundQueue, + runtimeFlags, + redisScratch, + crawlers, + appViewAgent, + auth, + repoSigningKey, + plcRotationKey, + cfg, + ...(overrides ?? {}), + }) } get accessVerifier() { @@ -93,60 +240,8 @@ export class AppContext { return auth.optionalAccessOrRoleVerifier(this.auth) } - get cfg(): ServerConfig { - return this.opts.cfg - } - - get mailer(): ServerMailer { - return this.opts.mailer - } - - get moderationMailer(): ModerationMailer { - return this.opts.moderationMailer - } - - get services(): Services { - return this.opts.services - } - - get sequencer(): Sequencer { - return this.opts.sequencer - } - - get sequencerLeader(): SequencerLeader | null { - return this.opts.sequencerLeader - } - - get runtimeFlags(): RuntimeFlags { - return this.opts.runtimeFlags - } - - get backgroundQueue(): BackgroundQueue { - return this.opts.backgroundQueue - } - - get crawlers(): Crawlers { - return this.opts.crawlers - } - - get plcClient(): plc.Client { - return new plc.Client(this.cfg.didPlcUrl) - } - - get idResolver(): IdResolver { - return this.opts.idResolver - } - - get didCache(): DidSqlCache { - return this.opts.didCache - } - - get appviewAgent(): AtpAgent { - return 
this.opts.appviewAgent - } - async serviceAuthHeaders(did: string, audience?: string) { - const aud = audience ?? this.cfg.bskyAppViewDid + const aud = audience ?? this.cfg.bskyAppView.did if (!aud) { throw new Error('Could not find bsky appview did') } @@ -156,20 +251,6 @@ export class AppContext { keypair: this.repoSigningKey, }) } - - shouldProxyModeration(): boolean { - return ( - this.cfg.bskyAppViewEndpoint !== undefined && - this.cfg.bskyAppViewModeration === true - ) - } - - canProxyWrite(): boolean { - return ( - this.cfg.bskyAppViewEndpoint !== undefined && - this.cfg.bskyAppViewDid !== undefined - ) - } } export default AppContext diff --git a/packages/pds/src/db/database-schema.ts b/packages/pds/src/db/database-schema.ts index ee92742edff..26159418206 100644 --- a/packages/pds/src/db/database-schema.ts +++ b/packages/pds/src/db/database-schema.ts @@ -1,6 +1,5 @@ import { Kysely } from 'kysely' import * as userAccount from './tables/user-account' -import * as userState from './tables/user-state' import * as userPref from './tables/user-pref' import * as didHandle from './tables/did-handle' import * as repoRoot from './tables/repo-root' @@ -11,22 +10,17 @@ import * as record from './tables/record' import * as backlink from './tables/backlink' import * as ipldBlock from './tables/ipld-block' import * as inviteCode from './tables/invite-code' -import * as notification from './tables/user-notification' import * as blob from './tables/blob' import * as repoBlob from './tables/repo-blob' -import * as deleteAccountToken from './tables/delete-account-token' import * as emailToken from './tables/email-token' import * as moderation from './tables/moderation' -import * as mute from './tables/mute' -import * as listMute from './tables/list-mute' import * as repoSeq from './tables/repo-seq' import * as appMigration from './tables/app-migration' import * as runtimeFlag from './tables/runtime-flag' -export type DatabaseSchemaType = runtimeFlag.PartialDB & - 
appMigration.PartialDB & +export type DatabaseSchemaType = appMigration.PartialDB & + runtimeFlag.PartialDB & userAccount.PartialDB & - userState.PartialDB & userPref.PartialDB & didHandle.PartialDB & refreshToken.PartialDB & @@ -37,14 +31,10 @@ export type DatabaseSchemaType = runtimeFlag.PartialDB & backlink.PartialDB & ipldBlock.PartialDB & inviteCode.PartialDB & - notification.PartialDB & blob.PartialDB & repoBlob.PartialDB & - deleteAccountToken.PartialDB & emailToken.PartialDB & moderation.PartialDB & - mute.PartialDB & - listMute.PartialDB & repoSeq.PartialDB export type DatabaseSchema = Kysely diff --git a/packages/pds/src/db/migrations/20221021T162202001Z-init.ts b/packages/pds/src/db/migrations/20221021T162202001Z-init.ts deleted file mode 100644 index 90935b8b99c..00000000000 --- a/packages/pds/src/db/migrations/20221021T162202001Z-init.ts +++ /dev/null @@ -1,314 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -const userTable = 'user' -const didHandleTable = 'did_handle' -const sceneTable = 'scene' -const refreshTokenTable = 'refresh_token' -const repoRootTable = 'repo_root' -const recordTable = 'record' -const ipldBlockTable = 'ipld_block' -const ipldBlockCreatorTable = 'ipld_block_creator' -const inviteCodeTable = 'invite_code' -const inviteUseTable = 'invite_code_use' -const notificationTable = 'user_notification' -const assertionTable = 'assertion' -const profileTable = 'profile' -const confirmationTable = 'confirmation' -const followTable = 'follow' -const postTable = 'post' -const postEntityTable = 'post_entity' -const repostTable = 'repost' -const trendTable = 'trend' -const voteTable = 'vote' -const messageQueueTable = 'message_queue' -const messageQueueCursorTable = 'message_queue_cursor' -const sceneMemberCountTable = 'scene_member_count' -const sceneVotesOnPostTable = 'scene_votes_on_post' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect === 'pg') { - try { - // Add trigram 
support, supporting user search. - // Explicitly add to public schema, so the extension can be seen in all schemas. - await sql`create extension if not exists pg_trgm with schema public`.execute( - db, - ) - } catch (err: any) { - // The "if not exists" isn't bulletproof against races, and we see test suites racing to - // create the extension. So we can just ignore errors indicating the extension already exists. - if (!err?.detail?.includes?.('(pg_trgm) already exists')) throw err - } - } - - // Postgres uses the type `bytea` for variable length bytes - const binaryDatatype = dialect === 'sqlite' ? 'blob' : sql`bytea` - - // Users - await db.schema - .createTable(userTable) - .addColumn('handle', 'varchar', (col) => col.primaryKey()) - .addColumn('email', 'varchar', (col) => col.notNull().unique()) - .addColumn('password', 'varchar', (col) => col.notNull()) - .addColumn('lastSeenNotifs', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createIndex(`${userTable}_email_lower_idx`) - .unique() - .on(userTable) - .expression(sql`lower("email")`) - .execute() - // Did Handle - await db.schema - .createTable(didHandleTable) - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('handle', 'varchar', (col) => col.unique()) - .addColumn('actorType', 'varchar') - .addColumn('declarationCid', 'varchar') - .execute() - await db.schema - .createIndex(`${didHandleTable}_handle_lower_idx`) - .unique() - .on(didHandleTable) - .expression(sql`lower("handle")`) - .execute() - if (dialect === 'pg') { - await db.schema // Supports user search - .createIndex(`${didHandleTable}_handle_tgrm_idx`) - .on(didHandleTable) - .using('gist') - .expression(sql`"handle" gist_trgm_ops`) - .execute() - } - // Scenes - await db.schema - .createTable(sceneTable) - .addColumn('handle', 'varchar', (col) => col.primaryKey()) - .addColumn('owner', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 
'varchar', (col) => col.notNull()) - .execute() - // Refresh Tokens - await db.schema - .createTable(refreshTokenTable) - .addColumn('id', 'varchar', (col) => col.primaryKey()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('expiresAt', 'varchar', (col) => col.notNull()) - .execute() - // Repo roots - await db.schema - .createTable(repoRootTable) - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('root', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Records - await db.schema - .createTable(recordTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('collection', 'varchar', (col) => col.notNull()) - .addColumn('rkey', 'varchar', (col) => col.notNull()) - .execute() - // Ipld Blocks - await db.schema - .createTable(ipldBlockTable) - .addColumn('cid', 'varchar', (col) => col.primaryKey()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('content', binaryDatatype, (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Ipld Block Creators - await db.schema - .createTable(ipldBlockCreatorTable) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${ipldBlockCreatorTable}_pkey`, ['cid', 'did']) - .execute() - // Invite Codes - await db.schema - .createTable(inviteCodeTable) - .addColumn('code', 'varchar', (col) => col.primaryKey()) - .addColumn('availableUses', 'integer', (col) => col.notNull()) - .addColumn('disabled', 'int2', (col) => col.defaultTo(0)) - .addColumn('forUser', 'varchar', (col) => col.notNull()) - .addColumn('createdBy', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - 
.createTable(inviteUseTable) - .addColumn('code', 'varchar', (col) => col.notNull()) - .addColumn('usedBy', 'varchar', (col) => col.notNull()) - .addColumn('usedAt', 'varchar', (col) => col.notNull()) - // Index names need to be unique per schema for postgres - .addPrimaryKeyConstraint(`${inviteUseTable}_pkey`, ['code', 'usedBy']) - .execute() - // Notifications - await db.schema - .createTable(notificationTable) - .addColumn('userDid', 'varchar', (col) => col.notNull()) - .addColumn('recordUri', 'varchar', (col) => col.notNull()) - .addColumn('recordCid', 'varchar', (col) => col.notNull()) - .addColumn('author', 'varchar', (col) => col.notNull()) - .addColumn('reason', 'varchar', (col) => col.notNull()) - .addColumn('reasonSubject', 'varchar') - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Assertions - await db.schema - .createTable(assertionTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('assertion', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectDeclarationCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addColumn('confirmUri', 'varchar') - .addColumn('confirmCid', 'varchar') - .addColumn('confirmCreated', 'varchar') - .addColumn('confirmIndexed', 'varchar') - .execute() - // Profiles - await db.schema - .createTable(profileTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('displayName', 'varchar', (col) => col.notNull()) - .addColumn('description', 'varchar') - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - if (dialect === 'pg') { - await 
db.schema // Supports user search - .createIndex(`${profileTable}_display_name_tgrm_idx`) - .on(profileTable) - .using('gist') - .expression(sql`"displayName" gist_trgm_ops`) - .execute() - } - // Follows - await db.schema - .createTable(followTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectDeclarationCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Posts - await db.schema - .createTable(postTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('text', 'varchar', (col) => col.notNull()) - .addColumn('replyRoot', 'varchar') - .addColumn('replyRootCid', 'varchar') - .addColumn('replyParent', 'varchar') - .addColumn('replyParentCid', 'varchar') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(postEntityTable) - .addColumn('postUri', 'varchar', (col) => col.notNull()) - .addColumn('startIndex', 'integer', (col) => col.notNull()) - .addColumn('endIndex', 'integer', (col) => col.notNull()) - .addColumn('type', 'varchar', (col) => col.notNull()) - .addColumn('value', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(repostTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 
'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(trendTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(voteTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('direction', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - - let mqBuilder = db.schema.createTable(messageQueueTable) - mqBuilder = - dialect === 'pg' - ? 
mqBuilder.addColumn('id', 'serial', (col) => col.primaryKey()) - : mqBuilder.addColumn('id', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - mqBuilder - .addColumn('message', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(messageQueueCursorTable) - .addColumn('consumer', 'varchar', (col) => col.primaryKey()) - .addColumn('cursor', 'integer', (col) => col.notNull()) - .execute() - await db.schema - .createTable(sceneMemberCountTable) - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('count', 'integer', (col) => col.notNull()) - .execute() - await db.schema - .createTable(sceneVotesOnPostTable) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('count', 'integer', (col) => col.notNull()) - .addColumn('postedTrending', 'int2', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${sceneVotesOnPostTable}_pkey`, [ - 'did', - 'subject', - ]) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable(sceneVotesOnPostTable).execute() - await db.schema.dropTable(sceneMemberCountTable).execute() - await db.schema.dropTable(messageQueueCursorTable).execute() - await db.schema.dropTable(messageQueueTable).execute() - await db.schema.dropTable(voteTable).execute() - await db.schema.dropTable(trendTable).execute() - await db.schema.dropTable(repostTable).execute() - await db.schema.dropTable(postEntityTable).execute() - await db.schema.dropTable(postTable).execute() - await db.schema.dropTable(followTable).execute() - await db.schema.dropTable(confirmationTable).execute() - await db.schema.dropTable(assertionTable).execute() - await db.schema.dropTable(profileTable).execute() - await db.schema.dropTable(notificationTable).execute() - await db.schema.dropTable(inviteUseTable).execute() - await db.schema.dropTable(inviteCodeTable).execute() - 
await db.schema.dropTable(ipldBlockCreatorTable).execute() - await db.schema.dropTable(ipldBlockTable).execute() - await db.schema.dropTable(recordTable).execute() - await db.schema.dropTable(repoRootTable).execute() - await db.schema.dropTable(didHandleTable).execute() - await db.schema.dropTable(userTable).execute() -} diff --git a/packages/pds/src/db/migrations/20221116T234458063Z-duplicate-records.ts b/packages/pds/src/db/migrations/20221116T234458063Z-duplicate-records.ts deleted file mode 100644 index 94aecb5ce8c..00000000000 --- a/packages/pds/src/db/migrations/20221116T234458063Z-duplicate-records.ts +++ /dev/null @@ -1,167 +0,0 @@ -import { Kysely } from 'kysely' - -const duplicateRecordTable = 'duplicate_record' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable(duplicateRecordTable) - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('duplicateOf', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createTable('repost_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('repost_unique_subject', ['creator', 'subject']) - .execute() - await db - .insertInto('repost_temp') - .expression((exp) => - exp - .selectFrom('repost') - .selectAll() - .where('uri', 'in', (qb) => - qb - .selectFrom('repost') - .select(db.fn.min('uri').as('uri')) - .groupBy(['creator', 'subject']), - ), - ) - .execute() - await db.schema.dropTable('repost').execute() - await 
db.schema.alterTable('repost_temp').renameTo('repost').execute() - - await db.schema - .createTable('trend_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('trend_unique_subject', ['creator', 'subject']) - .execute() - await db - .insertInto('trend_temp') - .expression((exp) => - exp - .selectFrom('trend') - .selectAll() - .where('uri', 'in', (qb) => - qb - .selectFrom('trend') - .select(db.fn.min('uri').as('uri')) - .groupBy(['creator', 'subject']), - ), - ) - .execute() - await db.schema.dropTable('trend').execute() - await db.schema.alterTable('trend_temp').renameTo('trend').execute() - - await db.schema - .createTable('vote_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('direction', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('vote_unique_subject', ['creator', 'subject']) - .execute() - await db - .insertInto('vote_temp') - .expression((exp) => - exp - .selectFrom('vote') - .selectAll() - .where('uri', 'in', (qb) => - qb - .selectFrom('vote') - .select(db.fn.min('uri').as('uri')) - .groupBy(['creator', 'subject']), - ), - ) - .execute() - await db.schema.dropTable('vote').execute() - await db.schema.alterTable('vote_temp').renameTo('vote').execute() - - await db.schema - 
.createTable('follow_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectDeclarationCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('follow_unique_subject', ['creator', 'subjectDid']) - .execute() - await db - .insertInto('follow_temp') - .expression((exp) => - exp - .selectFrom('follow') - .selectAll() - .where('uri', 'in', (qb) => - qb - .selectFrom('follow') - .select(db.fn.min('uri').as('uri')) - .groupBy(['creator', 'subjectDid']), - ), - ) - .execute() - await db.schema.dropTable('follow').execute() - await db.schema.alterTable('follow_temp').renameTo('follow').execute() - - await db.schema - .createTable('assertion_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('assertion', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectDeclarationCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addColumn('confirmUri', 'varchar') - .addColumn('confirmCid', 'varchar') - .addColumn('confirmCreated', 'varchar') - .addColumn('confirmIndexed', 'varchar') - .addUniqueConstraint('assertion_unique_subject', [ - 'creator', - 'subjectDid', - 'assertion', - ]) - .execute() - await db - .insertInto('assertion_temp') - .expression((exp) => - exp - .selectFrom('assertion') - .selectAll() - .where('uri', 'in', (qb) => - qb - .selectFrom('assertion') - .select(db.fn.min('uri').as('uri')) - .groupBy(['creator', 
'subjectDid', 'assertion']), - ), - ) - .execute() - await db.schema.dropTable('assertion').execute() - await db.schema.alterTable('assertion_temp').renameTo('assertion').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable(duplicateRecordTable) -} diff --git a/packages/pds/src/db/migrations/20221202T212459280Z-blobs.ts b/packages/pds/src/db/migrations/20221202T212459280Z-blobs.ts deleted file mode 100644 index 1be7a4cb09b..00000000000 --- a/packages/pds/src/db/migrations/20221202T212459280Z-blobs.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Kysely } from 'kysely' - -const blobTable = 'blob' -const repoBlobTable = 'repo_blob' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable(blobTable) - .addColumn('cid', 'varchar', (col) => col.primaryKey()) - .addColumn('mimeType', 'varchar', (col) => col.notNull()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('tempKey', 'varchar') - .addColumn('width', 'integer') - .addColumn('height', 'integer') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(repoBlobTable) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('recordUri', 'varchar', (col) => col.notNull()) - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${repoBlobTable}_pkey`, ['cid', 'recordUri']) - .execute() - - await db.schema - .alterTable('profile') - .addColumn('avatarCid', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.alterTable('profile').dropColumn('avatarCid').execute() - await db.schema.dropTable(repoBlobTable).execute() - await db.schema.dropTable(blobTable).execute() -} diff --git a/packages/pds/src/db/migrations/20221209T210026294Z-banners.ts b/packages/pds/src/db/migrations/20221209T210026294Z-banners.ts deleted file mode 100644 index 
3b140f957de..00000000000 --- a/packages/pds/src/db/migrations/20221209T210026294Z-banners.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('profile') - .addColumn('bannerCid', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.alterTable('profile').dropColumn('bannerCid').execute() -} diff --git a/packages/pds/src/db/migrations/20221212T195416407Z-post-media.ts b/packages/pds/src/db/migrations/20221212T195416407Z-post-media.ts deleted file mode 100644 index d82d077b671..00000000000 --- a/packages/pds/src/db/migrations/20221212T195416407Z-post-media.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Kysely } from 'kysely' - -const postEmbedImageTable = 'post_embed_image' -const postEmbedExternalTable = 'post_embed_external' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable(postEmbedImageTable) - .addColumn('postUri', 'varchar', (col) => col.notNull()) - .addColumn('position', 'varchar', (col) => col.notNull()) - .addColumn('imageCid', 'varchar', (col) => col.notNull()) - .addColumn('alt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${postEmbedImageTable}_pkey`, [ - 'postUri', - 'position', - ]) - .execute() - await db.schema - .createTable(postEmbedExternalTable) - .addColumn('postUri', 'varchar', (col) => col.primaryKey()) - .addColumn('uri', 'varchar', (col) => col.notNull()) - .addColumn('title', 'varchar', (col) => col.notNull()) - .addColumn('description', 'varchar', (col) => col.notNull()) - .addColumn('thumbCid', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable(postEmbedExternalTable).execute() - await db.schema.dropTable(postEmbedImageTable).execute() -} diff --git a/packages/pds/src/db/migrations/20221215T220356370Z-password-reset-otp.ts 
b/packages/pds/src/db/migrations/20221215T220356370Z-password-reset-otp.ts deleted file mode 100644 index 122d2451f1d..00000000000 --- a/packages/pds/src/db/migrations/20221215T220356370Z-password-reset-otp.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Kysely } from 'kysely' - -const userTable = 'user' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable(userTable) - .addColumn('passwordResetToken', 'varchar') - .execute() - await db.schema - .alterTable(userTable) - .addColumn('passwordResetGrantedAt', 'varchar') - .execute() - await db.schema - .createIndex('user_password_reset_token_idx') - .unique() - .on('user') - .column('passwordResetToken') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .dropIndex('user_password_reset_token_idx') - .on('user') - .execute() - await db.schema - .alterTable(userTable) - .dropColumn('passwordResetToken') - .execute() - await db.schema - .alterTable(userTable) - .dropColumn('passwordResetGrantedAt') - .execute() -} diff --git a/packages/pds/src/db/migrations/20221226T213635517Z-mute-init.ts b/packages/pds/src/db/migrations/20221226T213635517Z-mute-init.ts deleted file mode 100644 index 79f429cc9d7..00000000000 --- a/packages/pds/src/db/migrations/20221226T213635517Z-mute-init.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('mute') - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('mutedByDid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('mute_pkey', ['mutedByDid', 'did']) - .execute() - // Unrelated to muting: add notification indexing for per-user notifications - await db.schema - .createIndex('user_notification_did_indexed_at_idx') - .on('user_notification') - .columns(['userDid', 'indexedAt']) - .execute() -} - -export async function down(db: Kysely): Promise { 
- await db.schema.dropTable('mute').execute() - await db.schema - .dropIndex('user_notification_did_indexed_at_idx') - .on('user_notification') - .execute() -} diff --git a/packages/pds/src/db/migrations/20221230T215012029Z-moderation-init.ts b/packages/pds/src/db/migrations/20221230T215012029Z-moderation-init.ts deleted file mode 100644 index fad76183367..00000000000 --- a/packages/pds/src/db/migrations/20221230T215012029Z-moderation-init.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - // Moderation action - let builder = db.schema.createTable('moderation_action') - builder = - dialect === 'pg' - ? builder.addColumn('id', 'serial', (col) => col.primaryKey()) - : builder.addColumn('id', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - await builder - .addColumn('action', 'varchar', (col) => col.notNull()) - .addColumn('subjectType', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectUri', 'varchar') - .addColumn('subjectCid', 'varchar') - .addColumn('reason', 'text', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('createdBy', 'varchar', (col) => col.notNull()) - .addColumn('reversedAt', 'varchar') - .addColumn('reversedBy', 'varchar') - .addColumn('reversedReason', 'text') - .execute() - // Repo takedowns - await db.schema - .alterTable('repo_root') - .addColumn('takedownId', 'integer') - .execute() - // Record takedowns - await db.schema - .alterTable('record') - .addColumn('takedownId', 'integer') - .execute() - if (dialect !== 'sqlite') { - // Would have to recreate table in sqlite to add these constraints - await db.schema - .alterTable('repo_root') - .addForeignKeyConstraint( - 'repo_root_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() - await db.schema - 
.alterTable('record') - .addForeignKeyConstraint( - 'record_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() - } - // Moderation report - builder = db.schema.createTable('moderation_report') - builder = - dialect === 'pg' - ? builder.addColumn('id', 'serial', (col) => col.primaryKey()) - : builder.addColumn('id', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - await builder - .addColumn('subjectType', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectUri', 'varchar') - .addColumn('subjectCid', 'varchar') - .addColumn('reasonType', 'varchar', (col) => col.notNull()) - .addColumn('reason', 'text') - .addColumn('reportedByDid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - // Moderation report resolutions - await db.schema - .createTable('moderation_report_resolution') - .addColumn('reportId', 'integer', (col) => - col.notNull().references('moderation_report.id'), - ) - .addColumn('actionId', 'integer', (col) => - col.notNull().references('moderation_action.id'), - ) - .addColumn('createdBy', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('moderation_report_resolution_pkey', [ - 'reportId', - 'actionId', - ]) - .execute() - await db.schema - .createIndex('moderation_report_resolution_action_id_idx') - .on('moderation_report_resolution') - .column('actionId') - .execute() -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - await db.schema.dropTable('moderation_report_resolution').execute() - await db.schema.dropTable('moderation_report').execute() - if (dialect !== 'sqlite') { - await db.schema - .alterTable('repo_root') - .dropConstraint('repo_root_takedown_id_fkey') - .execute() - await db.schema - .alterTable('record') - .dropConstraint('record_takedown_id_fkey') - .execute() - 
} - await db.schema.alterTable('repo_root').dropColumn('takedownId').execute() - await db.schema.alterTable('record').dropColumn('takedownId').execute() - await db.schema.dropTable('moderation_action').execute() -} diff --git a/packages/pds/src/db/migrations/20230127T215753149Z-indexed-at-on-record.ts b/packages/pds/src/db/migrations/20230127T215753149Z-indexed-at-on-record.ts deleted file mode 100644 index d43c9a2c0b8..00000000000 --- a/packages/pds/src/db/migrations/20230127T215753149Z-indexed-at-on-record.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - const now = new Date().toISOString() - await db.schema - .alterTable('record') - .addColumn('indexedAt', 'varchar', (col) => col.notNull().defaultTo(now)) - .execute() - - const ref = db.dynamic.ref - - const indexedAtForRecordQb = db - .selectFrom('ipld_block') - .whereRef('ipld_block.cid', '=', ref('record.cid')) - .select('indexedAt') - - await db - .updateTable('record') - .set({ - indexedAt: indexedAtForRecordQb, - }) - .whereExists(indexedAtForRecordQb) - .execute() - - await db.schema.alterTable('ipld_block').dropColumn('indexedAt').execute() -} - -export async function down(db: Kysely): Promise { - const now = new Date().toISOString() - await db.schema - .alterTable('ipld_block') - .addColumn('indexedAt', 'varchar', (col) => col.notNull().defaultTo(now)) - .execute() - await db.schema.alterTable('record').dropColumn('indexedAt').execute() -} diff --git a/packages/pds/src/db/migrations/20230127T224743452Z-repo-sync-data-pt1.ts b/packages/pds/src/db/migrations/20230127T224743452Z-repo-sync-data-pt1.ts deleted file mode 100644 index 37723fcff26..00000000000 --- a/packages/pds/src/db/migrations/20230127T224743452Z-repo-sync-data-pt1.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { Kysely } from 'kysely' - -const commitBlockTable = 'repo_commit_block' -const commitHistoryTable = 'repo_commit_history' - -export async function up(db: Kysely): 
Promise { - await db.schema - .createTable(commitBlockTable) - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('block', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${commitBlockTable}_pkey`, [ - 'creator', - 'commit', - 'block', - ]) - .execute() - await db.schema - .createTable(commitHistoryTable) - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('prev', 'varchar') - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`${commitHistoryTable}_pkey`, [ - 'creator', - 'commit', - ]) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable(commitHistoryTable).execute() - await db.schema.dropTable(commitBlockTable).execute() -} diff --git a/packages/pds/src/db/migrations/20230201T200606704Z-repo-sync-data-pt2.ts b/packages/pds/src/db/migrations/20230201T200606704Z-repo-sync-data-pt2.ts deleted file mode 100644 index 4b84ddce071..00000000000 --- a/packages/pds/src/db/migrations/20230201T200606704Z-repo-sync-data-pt2.ts +++ /dev/null @@ -1,4 +0,0 @@ -// @NOTE This migration was all data and did not involve any schema changes - -export async function up(): Promise {} -export async function down(): Promise {} diff --git a/packages/pds/src/db/migrations/20230202T170426672Z-user-partitioned-cids.ts b/packages/pds/src/db/migrations/20230202T170426672Z-user-partitioned-cids.ts deleted file mode 100644 index 5f37a46ef3a..00000000000 --- a/packages/pds/src/db/migrations/20230202T170426672Z-user-partitioned-cids.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - const binaryDatatype = dialect === 'sqlite' ? 
'blob' : sql`bytea` - - await db.schema - .createTable('ipld_block_temp') - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('content', binaryDatatype, (col) => col.notNull()) - .addPrimaryKeyConstraint('ipld_block_with_creator_pkey', ['creator', 'cid']) - .execute() - - await db - .insertInto('ipld_block_temp') - .columns(['cid', 'creator', 'size', 'content']) - .expression((exp) => - exp - .selectFrom('ipld_block') - .innerJoin( - 'ipld_block_creator', - 'ipld_block_creator.cid', - 'ipld_block.cid', - ) - .select([ - 'ipld_block.cid', - 'ipld_block_creator.did', - 'ipld_block.size', - 'ipld_block.content', - ]), - ) - .execute() - - await db.schema.dropTable('ipld_block').execute() - await db.schema.dropTable('ipld_block_creator').execute() - await db.schema.alterTable('ipld_block_temp').renameTo('ipld_block').execute() -} - -export async function down(db: Kysely, dialect: Dialect): Promise { - const binaryDatatype = dialect === 'sqlite' ? 
'blob' : sql`bytea` - - await db.schema - .createTable('ipld_block_creator') - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint(`ipld_block_creator_pkey`, ['cid', 'did']) - .execute() - await db - .insertInto('ipld_block_creator') - .expression((exp) => - exp - .selectFrom('ipld_block') - .select(['ipld_block.cid as cid', 'ipld_block.creator as did']), - ) - .execute() - - await db.schema - .createTable('ipld_block_temp') - .addColumn('cid', 'varchar', (col) => col.primaryKey()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('content', binaryDatatype, (col) => col.notNull()) - .execute() - - await db - .insertInto('ipld_block_temp') - .expression((exp) => - exp - .selectFrom('ipld_block') - .select([ - 'ipld_block.cid as cid', - 'ipld_block.size as size', - 'ipld_block.content as content', - ]) - .distinct(), - ) - .execute() - - await db.schema.dropTable('ipld_block').execute() - await db.schema.alterTable('ipld_block_temp').renameTo('ipld_block').execute() -} diff --git a/packages/pds/src/db/migrations/20230202T170435937Z-delete-account-token.ts b/packages/pds/src/db/migrations/20230202T170435937Z-delete-account-token.ts deleted file mode 100644 index c517cc27c3f..00000000000 --- a/packages/pds/src/db/migrations/20230202T170435937Z-delete-account-token.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Kysely } from 'kysely' - -const deleteTokenTable = 'delete_account_token' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable(deleteTokenTable) - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('token', 'varchar', (col) => col.notNull()) - .addColumn('requestedAt', 'varchar', (col) => col.notNull()) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable(deleteTokenTable).execute() -} diff --git a/packages/pds/src/db/migrations/20230202T172831900Z-moderation-subject-blob.ts 
b/packages/pds/src/db/migrations/20230202T172831900Z-moderation-subject-blob.ts deleted file mode 100644 index 80bddefdd09..00000000000 --- a/packages/pds/src/db/migrations/20230202T172831900Z-moderation-subject-blob.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - // Track relevant subject blobs on action - await db.schema - .createTable('moderation_action_subject_blob') - .addColumn('actionId', 'integer', (col) => col.notNull()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('recordUri', 'varchar', (col) => col.notNull()) - .addForeignKeyConstraint( - 'moderation_action_subject_blob_action_id_fkey', - ['actionId'], - 'moderation_action', - ['id'], - ) - .addForeignKeyConstraint( - 'moderation_action_subject_blob_repo_blob_fkey', - ['cid', 'recordUri'], - 'repo_blob', - ['cid', 'recordUri'], - ) - .addPrimaryKeyConstraint('moderation_action_subject_blob_pkey', [ - 'actionId', - 'cid', - 'recordUri', - ]) - .execute() - // Blob takedowns - await db.schema - .alterTable('repo_blob') - .addColumn('takedownId', 'integer') - .execute() - if (dialect !== 'sqlite') { - // Would have to recreate table in sqlite to add these constraints - await db.schema - .alterTable('repo_blob') - .addForeignKeyConstraint( - 'repo_blob_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() - } -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - await db.schema.dropTable('moderation_action_subject_blob').execute() - if (dialect !== 'sqlite') { - await db.schema - .alterTable('repo_blob') - .dropConstraint('repo_blob_takedown_id_fkey') - .execute() - } - await db.schema.alterTable('repo_blob').dropColumn('takedownId').execute() -} diff --git a/packages/pds/src/db/migrations/20230202T213952826Z-repo-seq.ts b/packages/pds/src/db/migrations/20230202T213952826Z-repo-seq.ts deleted file mode 
100644 index 7bf67fc4ffb..00000000000 --- a/packages/pds/src/db/migrations/20230202T213952826Z-repo-seq.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' - -const repoSeqTable = 'repo_seq' -const repoSeqDidIndex = 'repo_seq_did_index' -const repoSeqCommitIndex = 'repo_seq_commit_index' - -export async function up(db: Kysely, dialect: Dialect): Promise { - let builder = db.schema.createTable(repoSeqTable) - if (dialect === 'pg') { - builder = builder.addColumn('seq', 'serial', (col) => col.primaryKey()) - } else { - builder = builder.addColumn('seq', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - } - await builder - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createIndex(repoSeqDidIndex) - .on(repoSeqTable) - .column('did') - .execute() - - await db.schema - .createIndex(repoSeqCommitIndex) - .on(repoSeqTable) - .column('commit') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex(repoSeqCommitIndex).execute() - await db.schema.dropIndex(repoSeqDidIndex).execute() - await db.schema.dropTable(repoSeqTable).execute() -} diff --git a/packages/pds/src/db/migrations/20230208T081544325Z-post-hydrate-indices.ts b/packages/pds/src/db/migrations/20230208T081544325Z-post-hydrate-indices.ts deleted file mode 100644 index 4e57a4fb9af..00000000000 --- a/packages/pds/src/db/migrations/20230208T081544325Z-post-hydrate-indices.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - // for, eg, "upvoteCount" on posts in feed views - await db.schema - .createIndex('vote_subject_direction_idx') - .on('vote') - .columns(['subject', 'direction']) - .execute() - - // for, eg, "repostCount" on 
posts in feed views - await db.schema - .createIndex('repost_subject_idx') - .on('repost') - .column('subject') - .execute() - - // for, eg, "replyCount" on posts in feed views - await db.schema - .createIndex('post_replyparent_idx') - .on('post') - .column('replyParent') - .execute() - - // for, eg, "followersCount" on profile views - await db.schema - .createIndex('follow_subjectdid_idx') - .on('follow') - .column('subjectDid') - .execute() - - // for, eg, "postsCount" on profile views - await db.schema - .createIndex('post_creator_idx') - .on('post') - .column('creator') - .execute() - - // for, eg, profile views - await db.schema - .createIndex('profile_creator_idx') - .on('profile') - .column('creator') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('vote_subject_direction_idx').execute() - await db.schema.dropIndex('repost_subject_idx').execute() - await db.schema.dropIndex('post_replyparent_idx').execute() - await db.schema.dropIndex('follow_subjectdid_idx').execute() - await db.schema.dropIndex('post_creator_idx').execute() - await db.schema.dropIndex('profile_creator_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230208T222001557Z-user-table-did-pkey.ts b/packages/pds/src/db/migrations/20230208T222001557Z-user-table-did-pkey.ts deleted file mode 100644 index 3de1fd0aefd..00000000000 --- a/packages/pds/src/db/migrations/20230208T222001557Z-user-table-did-pkey.ts +++ /dev/null @@ -1,133 +0,0 @@ -import { Kysely, sql } from 'kysely' - -export async function up(db: Kysely): Promise { - // create switch user -> user_account with did primaryKey - await db.schema - .createTable('user_account') - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('email', 'varchar', (col) => col.notNull().unique()) - .addColumn('passwordScrypt', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('passwordResetToken', 'varchar') - 
.addColumn('passwordResetGrantedAt', 'varchar') - .execute() - await db - .insertInto('user_account') - .columns([ - 'did', - 'email', - 'passwordScrypt', - 'createdAt', - 'passwordResetToken', - 'passwordResetGrantedAt', - ]) - .expression((exp) => - exp - .selectFrom('user') - .innerJoin('did_handle', 'did_handle.handle', 'user.handle') - .select([ - 'did_handle.did', - 'user.email', - 'user.password', - 'user.createdAt', - 'user.passwordResetToken', - 'user.passwordResetGrantedAt', - ]), - ) - .execute() - - // add indices - await db.schema - .createIndex(`user_account_email_lower_idx`) - .unique() - .on('user_account') - .expression(sql`lower("email")`) - .execute() - await db.schema - .createIndex('user_account_password_reset_token_idx') - .unique() - .on('user_account') - .column('passwordResetToken') - .execute() - - // move notifsLastSeen to a new user_state table - await db.schema - .createTable('user_state') - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('lastSeenNotifs', 'varchar', (col) => col.notNull()) - .execute() - await db - .insertInto('user_state') - .columns(['did', 'lastSeenNotifs']) - .expression((exp) => - exp - .selectFrom('user') - .innerJoin('did_handle', 'did_handle.handle', 'user.handle') - .select(['did_handle.did', 'user.lastSeenNotifs']), - ) - .execute() - - // drop old tables & indices - await db.schema.dropIndex('user_email_lower_idx').execute() - await db.schema.dropIndex('user_password_reset_token_idx').execute() - await db.schema.dropTable('user').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('user') - .addColumn('handle', 'varchar', (col) => col.primaryKey()) - .addColumn('email', 'varchar', (col) => col.notNull().unique()) - .addColumn('password', 'varchar', (col) => col.notNull()) - .addColumn('lastSeenNotifs', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('passwordResetToken', 'varchar') 
- .addColumn('passwordResetGrantedAt', 'varchar') - .execute() - - await db - .insertInto('user') - .columns([ - 'handle', - 'email', - 'password', - 'lastSeenNotifs', - 'createdAt', - 'passwordResetToken', - 'passwordResetGrantedAt', - ]) - .expression((exp) => - exp - .selectFrom('user_account') - .innerJoin('did_handle', 'did_handle.did', 'user_account.did') - .innerJoin('user_state', 'user_state.did', 'did_handle.did') - .select([ - 'did_handle.handle', - 'user_account.email', - 'user_account.passwordScrypt', - 'user_state.lastSeenNotifs', - 'user_account.createdAt', - 'user_account.passwordResetToken', - 'user_account.passwordResetGrantedAt', - ]), - ) - .execute() - - await db.schema - .createIndex(`user_email_lower_idx`) - .unique() - .on('user') - .expression(sql`lower("email")`) - .execute() - await db.schema - .createIndex('user_password_reset_token_idx') - .unique() - .on('user') - .column('passwordResetToken') - .execute() - - await db.schema.dropTable('user_state').execute() - await db.schema.dropIndex('user_account_email_lower_idx').execute() - await db.schema.dropIndex('user_account_password_reset_token_idx').execute() - await db.schema.dropTable('user_account').execute() -} diff --git a/packages/pds/src/db/migrations/20230210T210132396Z-post-hierarchy.ts b/packages/pds/src/db/migrations/20230210T210132396Z-post-hierarchy.ts deleted file mode 100644 index 894cb4db80c..00000000000 --- a/packages/pds/src/db/migrations/20230210T210132396Z-post-hierarchy.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { Kysely, sql } from 'kysely' - -export async function up( - db: Kysely<{ post: Post; post_hierarchy: PostHierarchy }>, -): Promise { - await db.schema - .createTable('post_hierarchy') - .addColumn('uri', 'varchar', (col) => col.notNull()) - .addColumn('ancestorUri', 'varchar', (col) => col.notNull()) - .addColumn('depth', 'integer', (col) => col.notNull()) - .addPrimaryKeyConstraint('post_hierarchy_pkey', ['uri', 'ancestorUri']) - .execute() - - // Supports 
fetching all children for a post - await db.schema - .createIndex('post_hierarchy_ancestoruri_idx') - .on('post_hierarchy') - .column('ancestorUri') - .execute() - - const postHierarchyQb = db - .withRecursive('hierarchy(uri, ancestorUri, depth)', (cte) => { - return cte - .selectFrom('post') - .select([ - 'post.uri as uri', - 'post.uri as ancestorUri', - sql`0`.as('depth'), - ]) - .unionAll( - cte - .selectFrom('post') - .innerJoin('hierarchy', 'hierarchy.ancestorUri', 'post.uri') - .where('post.replyParent', 'is not', null) - .select([ - 'hierarchy.uri as uri', - sql`post."replyParent"`.as('ancestorUri'), - sql`hierarchy.depth + 1`.as('depth'), - ]), - ) - }) - .selectFrom('hierarchy') - - await db - .insertInto('post_hierarchy') - .columns(['uri', 'ancestorUri', 'depth']) - .expression(postHierarchyQb.select(['uri', 'ancestorUri', 'depth'])) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('post_hierarchy_ancestoruri_idx').execute() - await db.schema.dropTable('post_hierarchy').execute() -} - -type Post = { - uri: string - replyParent: string | null -} - -type PostHierarchy = { - uri: string - ancestorUri: string - depth: number -} diff --git a/packages/pds/src/db/migrations/20230214T172233550Z-embed-records.ts b/packages/pds/src/db/migrations/20230214T172233550Z-embed-records.ts deleted file mode 100644 index 0d2c6a7afdd..00000000000 --- a/packages/pds/src/db/migrations/20230214T172233550Z-embed-records.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('post_embed_record') - .addColumn('postUri', 'varchar', (col) => col.notNull()) - .addColumn('embedUri', 'varchar', (col) => col.notNull()) - .addColumn('embedCid', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('post_embed_record_pkey', ['postUri', 'embedUri']) - .execute() -} - -export async function down(db: Kysely): Promise { - await 
db.schema.dropTable('post_embed_record').execute() -} diff --git a/packages/pds/src/db/migrations/20230301T222603402Z-repo-ops.ts b/packages/pds/src/db/migrations/20230301T222603402Z-repo-ops.ts deleted file mode 100644 index 5a4bd6820b6..00000000000 --- a/packages/pds/src/db/migrations/20230301T222603402Z-repo-ops.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('repo_op') - .addColumn('did', 'text', (col) => col.notNull()) - .addColumn('commit', 'text', (col) => col.notNull()) - .addColumn('action', 'text', (col) => col.notNull()) - .addColumn('path', 'text', (col) => col.notNull()) - .addColumn('cid', 'text') - .addPrimaryKeyConstraint('repo_op_pkey', ['did', 'commit', 'path']) - .execute() - - await db.deleteFrom('repo_seq').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('repo_op').execute() -} diff --git a/packages/pds/src/db/migrations/20230304T193548198Z-pagination-indices.ts b/packages/pds/src/db/migrations/20230304T193548198Z-pagination-indices.ts deleted file mode 100644 index 21278b14f59..00000000000 --- a/packages/pds/src/db/migrations/20230304T193548198Z-pagination-indices.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('post_order_by_idx') - .on('post') - .columns(['indexedAt', 'cid']) - .execute() - - await db.schema - .createIndex('repost_order_by_idx') - .on('repost') - .columns(['indexedAt', 'cid']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('repost_order_by_idx').execute() - await db.schema.dropIndex('post_order_by_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230308T234640077Z-record-indexes.ts b/packages/pds/src/db/migrations/20230308T234640077Z-record-indexes.ts deleted file mode 100644 index 12e820f6d5e..00000000000 --- 
a/packages/pds/src/db/migrations/20230308T234640077Z-record-indexes.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('record_cid_index') - .on('record') - .columns(['did', 'cid']) - .execute() - - await db.schema - .createIndex('record_collection_index') - .on('record') - .columns(['did', 'collection']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('record_cid_index').execute() - await db.schema.dropIndex('record_collection_index').execute() -} diff --git a/packages/pds/src/db/migrations/20230309T012947663Z-app-migration.ts b/packages/pds/src/db/migrations/20230309T012947663Z-app-migration.ts deleted file mode 100644 index 229c91ffe25..00000000000 --- a/packages/pds/src/db/migrations/20230309T012947663Z-app-migration.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('app_migration') - .addColumn('id', 'varchar', (col) => col.notNull().primaryKey()) - .addColumn('success', 'int2', (col) => col.notNull().defaultTo(0)) - .addColumn('completedAt', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('app_migration').execute() -} diff --git a/packages/pds/src/db/migrations/20230310T205728933Z-subscription-init.ts b/packages/pds/src/db/migrations/20230310T205728933Z-subscription-init.ts deleted file mode 100644 index 1c03ccb798d..00000000000 --- a/packages/pds/src/db/migrations/20230310T205728933Z-subscription-init.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('subscription') - .addColumn('service', 'varchar', (col) => col.notNull()) - .addColumn('method', 'varchar', (col) => col.notNull()) - .addColumn('state', 'varchar', (col) => col.notNull()) - 
.addPrimaryKeyConstraint('subscription_pkey', ['service', 'method']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('subscription').execute() -} diff --git a/packages/pds/src/db/migrations/20230313T232322844Z-blob-creator.ts b/packages/pds/src/db/migrations/20230313T232322844Z-blob-creator.ts deleted file mode 100644 index 117a72108c7..00000000000 --- a/packages/pds/src/db/migrations/20230313T232322844Z-blob-creator.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('blob_new') - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('mimeType', 'varchar', (col) => col.notNull()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('tempKey', 'varchar') - .addColumn('width', 'integer') - .addColumn('height', 'integer') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('blob_creator_pkey', ['creator', 'cid']) - .execute() - - await db - .insertInto('blob_new') - .columns([ - 'creator', - 'cid', - 'mimeType', - 'size', - 'tempKey', - 'width', - 'height', - 'createdAt', - ]) - .expression((exp) => - exp - .selectFrom('blob') - .innerJoin('repo_blob', 'repo_blob.cid', 'blob.cid') - .select([ - 'repo_blob.did', - 'blob.cid', - 'blob.mimeType', - 'blob.size', - 'blob.tempKey', - 'blob.width', - 'blob.height', - 'blob.createdAt', - ]) - // kinda silly, but we need a WHERE clause so that the ON CONFLICT parses correctly - .where('repo_blob.did', 'is not', null), - ) - .onConflict((oc) => oc.doNothing()) - .execute() - - await db.schema.dropTable('blob').execute() - await db.schema.alterTable('blob_new').renameTo('blob').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('blob_new') - .addColumn('cid', 'varchar', (col) => col.primaryKey()) - 
.addColumn('mimeType', 'varchar', (col) => col.notNull()) - .addColumn('size', 'integer', (col) => col.notNull()) - .addColumn('tempKey', 'varchar') - .addColumn('width', 'integer') - .addColumn('height', 'integer') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - - await db - .insertInto('blob_new') - .columns([ - 'cid', - 'mimeType', - 'size', - 'tempKey', - 'width', - 'height', - 'createdAt', - ]) - .expression((exp) => - exp - .selectFrom('blob') - .select([ - 'blob.cid', - 'blob.mimeType', - 'blob.size', - 'blob.tempKey', - 'blob.width', - 'blob.height', - 'blob.createdAt', - ]) - // kinda silly, but we need a WHERE clause so that the ON CONFLICT parses correctly - .where('cid', 'is not', null), - ) - .onConflict((oc) => oc.doNothing()) - .execute() - - await db.schema.dropTable('blob').execute() - await db.schema.alterTable('blob_new').renameTo('blob').execute() -} diff --git a/packages/pds/src/db/migrations/20230314T023842127Z-refresh-grace-period.ts b/packages/pds/src/db/migrations/20230314T023842127Z-refresh-grace-period.ts deleted file mode 100644 index f7755aea7fd..00000000000 --- a/packages/pds/src/db/migrations/20230314T023842127Z-refresh-grace-period.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('refresh_token') - .addColumn('nextId', 'varchar') - .execute() - await db.schema // Aids in refresh token cleanup - .createIndex('refresh_token_did_idx') - .on('refresh_token') - .column('did') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('refresh_token_did_idx').execute() - await db.schema.alterTable('refresh_token').dropColumn('nextId').execute() -} diff --git a/packages/pds/src/db/migrations/20230323T162732466Z-remove-scenes.ts b/packages/pds/src/db/migrations/20230323T162732466Z-remove-scenes.ts deleted file mode 100644 index ea2ac910754..00000000000 --- 
a/packages/pds/src/db/migrations/20230323T162732466Z-remove-scenes.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema.dropTable('scene').execute() - await db.schema.dropTable('trend').execute() - await db.schema.dropTable('scene_member_count').execute() - await db.schema.dropTable('scene_votes_on_post').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('scene') - .addColumn('handle', 'varchar', (col) => col.primaryKey()) - .addColumn('owner', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createTable('trend') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createTable('scene_member_count') - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('count', 'integer', (col) => col.notNull()) - .execute() - await db.schema - .createTable('scene_votes_on_post') - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('count', 'integer', (col) => col.notNull()) - .addColumn('postedTrending', 'int2', (col) => col.notNull()) - .addPrimaryKeyConstraint(`scene_votes_on_post_pkey`, ['did', 'subject']) - .execute() -} diff --git a/packages/pds/src/db/migrations/20230328T214311000Z-remove-declarations-assertions-confirmations.ts b/packages/pds/src/db/migrations/20230328T214311000Z-remove-declarations-assertions-confirmations.ts deleted file mode 100644 index 094bd68d39f..00000000000 
--- a/packages/pds/src/db/migrations/20230328T214311000Z-remove-declarations-assertions-confirmations.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db - .deleteFrom('duplicate_record') - .where( - 'duplicateOf', - 'in', - db.selectFrom('assertion').select('assertion.uri'), - ) - .orWhere( - 'duplicateOf', - 'in', - db.selectFrom('assertion').select('assertion.confirmUri'), - ) - .execute() - await db.schema.dropTable('assertion').execute() - await db.schema - .alterTable('follow') - .dropColumn('subjectDeclarationCid') - .execute() - await db.schema - .alterTable('did_handle') - .dropColumn('declarationCid') - .execute() - await db.schema.alterTable('did_handle').dropColumn('actorType').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .alterTable('did_handle') - .addColumn('actorType', 'varchar') - .execute() - await db.schema - .alterTable('did_handle') - .addColumn('declarationCid', 'varchar') - .execute() - await db.schema - .alterTable('follow') - .addColumn('subjectDeclarationCid', 'varchar', (col) => - col.notNull().defaultTo(''), - ) - .execute() - await db.schema - .createTable('assertion') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('assertion', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectDeclarationCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addColumn('confirmUri', 'varchar') - .addColumn('confirmCid', 'varchar') - .addColumn('confirmCreated', 'varchar') - .addColumn('confirmIndexed', 'varchar') - .addUniqueConstraint('assertion_unique_subject', [ - 'creator', - 'subjectDid', - 'assertion', - ]) - .execute() 
-} - -type Schema = { - assertion: Assertion - duplicate_record: DuplicateRecord -} - -type Assertion = { - uri: string - confirmUri: string | null -} - -type DuplicateRecord = { - uri: string - duplicateOf: string -} diff --git a/packages/pds/src/db/migrations/20230328T214311001Z-votes-to-likes.ts b/packages/pds/src/db/migrations/20230328T214311001Z-votes-to-likes.ts deleted file mode 100644 index 026b710ad83..00000000000 --- a/packages/pds/src/db/migrations/20230328T214311001Z-votes-to-likes.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { Kysely, sql } from 'kysely' - -export async function up(db: Kysely): Promise { - // Nix downvotes from index - const downvotesQb = db.selectFrom('vote').where('direction', '=', 'down') - await db - .deleteFrom('duplicate_record') - .where('duplicateOf', 'in', downvotesQb.select('vote.uri')) - .execute() - await db.schema - .createTable('like') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - // Aids in index uniqueness plus post like counts - .addUniqueConstraint('like_unique_subject', ['subject', 'creator']) - .execute() - await db - .insertInto('like') - .columns([ - 'uri', - 'cid', - 'creator', - 'subject', - 'subjectCid', - 'createdAt', - 'indexedAt', - ]) - .expression((exp) => - exp - .selectFrom('vote') - .where('direction', '=', 'up') - .select([ - 'uri', - 'cid', - 'creator', - 'subject', - 'subjectCid', - 'createdAt', - 'indexedAt', - ]), - ) - .execute() - const missing = await db - .selectFrom('vote') - .select(sql`count(*)`.as('count')) - .where('direction', '=', 'up') - .whereNotExists( - db - .selectFrom('like') - .selectAll() - .whereRef('uri', '=', 
db.dynamic.ref('vote.uri')), - ) - .executeTakeFirstOrThrow() - if (missing.count !== 0) { - throw new Error( - `Likes were not migrated properly from votes: ${missing.count} likes missing.`, - ) - } - await db.schema.dropTable('vote').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('vote') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('direction', 'varchar', (col) => col.notNull()) - .addColumn('subject', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('vote_unique_subject', ['creator', 'subject']) - .execute() - await db - .insertInto('vote') - .columns([ - 'uri', - 'cid', - 'creator', - 'direction', - 'subject', - 'subjectCid', - 'createdAt', - 'indexedAt', - ]) - .expression((exp) => - exp - .selectFrom('like') - .select([ - 'uri', - 'cid', - 'creator', - sql`${'up'}`.as('direction'), - 'subject', - 'subjectCid', - 'createdAt', - 'indexedAt', - ]), - ) - .execute() - await db.schema.dropTable('like').execute() - await db.schema - .createIndex('vote_subject_direction_idx') - .on('vote') - .columns(['subject', 'direction']) - .execute() -} - -type Schema = { - vote: Vote - like: Like - duplicate_record: DuplicateRecord -} - -type Vote = { - uri: string - direction: 'up' | 'down' - [k: string]: unknown -} - -type Like = { - uri: string - [k: string]: unknown -} - -type DuplicateRecord = { - uri: string - duplicateOf: string -} diff --git a/packages/pds/src/db/migrations/20230328T214311002Z-remove-post-entities.ts b/packages/pds/src/db/migrations/20230328T214311002Z-remove-post-entities.ts deleted file mode 100644 index 480922aaf0f..00000000000 --- 
a/packages/pds/src/db/migrations/20230328T214311002Z-remove-post-entities.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema.dropTable('post_entity').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('post_entity') - .addColumn('postUri', 'varchar', (col) => col.notNull()) - .addColumn('startIndex', 'integer', (col) => col.notNull()) - .addColumn('endIndex', 'integer', (col) => col.notNull()) - .addColumn('type', 'varchar', (col) => col.notNull()) - .addColumn('value', 'varchar', (col) => col.notNull()) - .execute() -} diff --git a/packages/pds/src/db/migrations/20230328T214311003Z-backlinks.ts b/packages/pds/src/db/migrations/20230328T214311003Z-backlinks.ts deleted file mode 100644 index eff4bb35381..00000000000 --- a/packages/pds/src/db/migrations/20230328T214311003Z-backlinks.ts +++ /dev/null @@ -1,152 +0,0 @@ -import { Kysely, sql } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('backlink') - .addColumn('uri', 'varchar', (col) => col.notNull()) - .addColumn('path', 'varchar', (col) => col.notNull()) - .addColumn('linkToUri', 'varchar') - .addColumn('linkToDid', 'varchar') - .addPrimaryKeyConstraint('backlinks_pkey', ['uri', 'path']) - .addCheckConstraint( - 'backlink_link_to_chk', - // Exactly one of linkToUri or linkToDid should be set - sql`("linkToUri" is null and "linkToDid" is not null) or ("linkToUri" is not null and "linkToDid" is null)`, - ) - .execute() - - // Seed backlinks - - // Existing likes and their dupes - await db - .insertInto('backlink') - .columns(['uri', 'linkToUri', 'path']) - .expression((exp) => - exp - .selectFrom('like') - .select(['like.uri', 'like.subject', sql`${'subject.uri'}`.as('path')]), - ) - .execute() - await db - .insertInto('backlink') - .columns(['uri', 'linkToUri', 'path']) - .expression((exp) => - exp - .selectFrom('duplicate_record') - 
.innerJoin('like', 'like.uri', 'duplicate_record.duplicateOf') - .select([ - 'duplicate_record.uri', - 'like.subject', - sql`${'subject.uri'}`.as('path'), - ]), - ) - .execute() - - // Existing reposts and their dupes - await db - .insertInto('backlink') - .columns(['uri', 'linkToUri', 'path']) - .expression((exp) => - exp - .selectFrom('repost') - .select([ - 'repost.uri', - 'repost.subject', - sql`${'subject.uri'}`.as('path'), - ]), - ) - .execute() - await db - .insertInto('backlink') - .columns(['uri', 'linkToUri', 'path']) - .expression((exp) => - exp - .selectFrom('duplicate_record') - .innerJoin('repost', 'repost.uri', 'duplicate_record.duplicateOf') - .select([ - 'duplicate_record.uri', - 'repost.subject', - sql`${'subject.uri'}`.as('path'), - ]), - ) - .execute() - - // Existing follows and their dupes - await db - .insertInto('backlink') - .columns(['uri', 'linkToDid', 'path']) - .expression((exp) => - exp - .selectFrom('follow') - .select([ - 'follow.uri', - 'follow.subjectDid', - sql`${'subject'}`.as('path'), - ]), - ) - .execute() - await db - .insertInto('backlink') - .columns(['uri', 'linkToDid', 'path']) - .expression((exp) => - exp - .selectFrom('duplicate_record') - .innerJoin('follow', 'follow.uri', 'duplicate_record.duplicateOf') - .select([ - 'duplicate_record.uri', - 'follow.subjectDid', - sql`${'subject'}`.as('path'), - ]), - ) - .execute() - - await db.schema - .createIndex('backlink_path_to_uri_idx') - .on('backlink') - .columns(['path', 'linkToUri']) - .execute() - await db.schema - .createIndex('backlink_path_to_did_idx') - .on('backlink') - .columns(['path', 'linkToDid']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('backlink').execute() -} - -type Schema = { - backlink: Backlink - like: Like - follow: Follow - repost: Repost - duplicate_record: DuplicateRecord -} - -interface Backlink { - uri: string - path: string - linkToUri: string | null - linkToDid: string | null -} - -interface 
Like { - uri: string - subject: string -} - -interface Follow { - uri: string - subjectDid: string -} - -interface Repost { - uri: string - subject: string -} - -interface DuplicateRecord { - uri: string - duplicateOf: string -} diff --git a/packages/pds/src/db/migrations/20230328T214311004Z-profile-display-name-empty.ts b/packages/pds/src/db/migrations/20230328T214311004Z-profile-display-name-empty.ts deleted file mode 100644 index f1c78ce09b8..00000000000 --- a/packages/pds/src/db/migrations/20230328T214311004Z-profile-display-name-empty.ts +++ /dev/null @@ -1,118 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely, dialect): Promise { - if (dialect === 'pg') { - await db.schema - .alterTable('profile') - .alterColumn('displayName') - .dropNotNull() - .execute() - return - } - - // Sqlite version below: Need to recreate table due to sqlite limitations. - - // Drop old indices - await db.schema.dropIndex('profile_creator_idx').execute() - // Create table w/ change - await db.schema - .createTable('profile_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('displayName', 'varchar') // <-- change is here, making displayName nullable - .addColumn('description', 'varchar') - .addColumn('avatarCid', 'varchar') - .addColumn('bannerCid', 'varchar') - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Fill table - await db - .insertInto('profile_temp') - .columns(profileColumns) - .expression((exp) => exp.selectFrom('profile').select(profileColumns)) - .execute() - // Replace table by name - await db.schema.dropTable('profile').execute() - await db.schema.alterTable('profile_temp').renameTo('profile').execute() - // Recreate indices - await db.schema // Supports profile views - .createIndex('profile_creator_idx') - .on('profile') - .column('creator') - .execute() -} - -export 
async function down(db: Kysely, dialect): Promise { - if (dialect === 'pg') { - await db.schema - .alterTable('profile') - .alterColumn('displayName') - .setNotNull() - .execute() - return - } - - // Sqlite version below: Need to recreate table due to sqlite limitations. - - // Drop old indices - await db.schema.dropIndex('profile_creator_idx').execute() - // Create table w/ change - await db.schema - .createTable('profile_temp') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('displayName', 'varchar', (col) => col.notNull()) // <-- change is here, making displayName non-nullable again - .addColumn('description', 'varchar') - .addColumn('avatarCid', 'varchar') - .addColumn('bannerCid', 'varchar') - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - // Fill table - await db - .insertInto('profile_temp') - .columns(profileColumns) - .expression((exp) => exp.selectFrom('profile').select(profileColumns)) - .execute() - // Replace table by name - await db.schema.dropTable('profile').execute() - await db.schema.alterTable('profile_temp').renameTo('profile').execute() - // Recreate indices - await db.schema - .createIndex('profile_creator_idx') - .on('profile') - .column('creator') - .execute() -} - -type Schema = { - profile: Profile - profile_temp: ProfileTemp -} - -type Profile = { - uri: string - cid: string - creator: string - displayName: string - description: string | null - avatarCid: string | null - bannerCid: string | null - indexedAt: string -} - -type ProfileTemp = Omit & { - displayName: string | null -} - -const profileColumns: (keyof Profile)[] = [ - 'uri', - 'cid', - 'creator', - 'displayName', - 'description', - 'avatarCid', - 'bannerCid', - 'indexedAt', -] diff --git a/packages/pds/src/db/migrations/20230328T214311005Z-rework-seq.ts 
b/packages/pds/src/db/migrations/20230328T214311005Z-rework-seq.ts deleted file mode 100644 index 144fa3a2075..00000000000 --- a/packages/pds/src/db/migrations/20230328T214311005Z-rework-seq.ts +++ /dev/null @@ -1,110 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -const repoSeqTable = 'repo_seq' -const repoOpTable = 'repo_op' -const repoSeqDidIndex = 'repo_seq_did_index' -const repoSeqCommitIndex = 'repo_seq_commit_index' -const repoSeqEventTypeIndex = 'repo_seq_event_type_index' -const repoSeqSequencedAtIndex = 'repo_seq_sequenced_at_index' - -export async function up(db: Kysely, dialect: Dialect): Promise { - await db.schema.dropIndex(repoSeqCommitIndex).execute() - await db.schema.dropIndex(repoSeqDidIndex).execute() - await db.schema.dropTable(repoSeqTable).execute() - await db.schema.dropTable(repoOpTable).execute() - - let builder = db.schema.createTable(repoSeqTable) - if (dialect === 'pg') { - builder = builder - .addColumn('seq', 'bigserial', (col) => col.primaryKey()) - .addColumn('invalidatedBy', 'bigint') - } else { - builder = builder - .addColumn('seq', 'integer', (col) => col.autoIncrement().primaryKey()) - .addColumn('invalidatedBy', 'integer') - } - - const binaryDatatype = dialect === 'sqlite' ? 
sql`blob` : sql`bytea` - await builder - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('event', binaryDatatype, (col) => col.notNull()) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .addForeignKeyConstraint( - 'invalidated_by_fkey', - // @ts-ignore - ['invalidatedBy'], - 'repo_seq', - ['seq'], - ) - .execute() - - // for filtering seqs based on did - await db.schema - .createIndex(repoSeqDidIndex) - .on(repoSeqTable) - .column('did') - .execute() - - // for filtering seqs based on event type - await db.schema - .createIndex(repoSeqEventTypeIndex) - .on(repoSeqTable) - .column('eventType') - .execute() - - // for entering into the seq stream at a particular time - await db.schema - .createIndex(repoSeqSequencedAtIndex) - .on(repoSeqTable) - .column('sequencedAt') - .execute() -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - await db.schema.dropIndex(repoSeqSequencedAtIndex).execute() - await db.schema.dropIndex(repoSeqEventTypeIndex).execute() - await db.schema.dropIndex(repoSeqDidIndex).execute() - await db.schema.dropTable(repoSeqTable).execute() - - let builder = db.schema.createTable(repoSeqTable) - if (dialect === 'pg') { - builder = builder.addColumn('seq', 'serial', (col) => col.primaryKey()) - } else { - builder = builder.addColumn('seq', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - } - await builder - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createIndex(repoSeqDidIndex) - .on(repoSeqTable) - .column('did') - .execute() - - await db.schema - .createIndex(repoSeqCommitIndex) - .on(repoSeqTable) - .column('commit') - .execute() - - await db.schema - .createTable(repoOpTable) - 
.addColumn('did', 'text', (col) => col.notNull()) - .addColumn('commit', 'text', (col) => col.notNull()) - .addColumn('action', 'text', (col) => col.notNull()) - .addColumn('path', 'text', (col) => col.notNull()) - .addColumn('cid', 'text') - .addPrimaryKeyConstraint('repo_op_pkey', ['did', 'commit', 'path']) - .execute() -} diff --git a/packages/pds/src/db/migrations/20230406T185855842Z-feed-item-init.ts b/packages/pds/src/db/migrations/20230406T185855842Z-feed-item-init.ts deleted file mode 100644 index 0d26fddc22b..00000000000 --- a/packages/pds/src/db/migrations/20230406T185855842Z-feed-item-init.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { DynamicModule, Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('feed_item') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('type', 'varchar', (col) => col.notNull()) - .addColumn('postUri', 'varchar', (col) => col.notNull()) - .addColumn('originatorDid', 'varchar', (col) => col.notNull()) - .addColumn('sortAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createIndex('feed_item_originator_idx') - .on('feed_item') - .column('originatorDid') - .execute() - await db.schema - .createIndex('feed_item_cursor_idx') - .on('feed_item') - .columns(['sortAt', 'cid']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('feed_item').execute() -} - -// This is intentionally not called here, but exists for documentation purposes. -// This query should be safe to be run any time to update the feed_item index. 
-// eslint-disable-next-line @typescript-eslint/no-unused-vars -function getFeedItemMigrationSql(db: Kysely, dialect: Dialect) { - const { ref } = db.dynamic - const migrationQb = db - .insertInto('feed_item') - .columns(['type', 'uri', 'cid', 'postUri', 'originatorDid', 'sortAt']) - .expression((qb) => { - return qb - .selectFrom('post') - .select([ - sql`'post'`.as('type'), - 'uri', - 'cid', - 'uri as postUri', - 'creator as originatorDid', - min(dialect, ref('indexedAt'), ref('createdAt')).as('sortAt'), - ]) - .unionAll( - qb - .selectFrom('repost') - .innerJoin('post', 'post.uri', 'repost.subject') - .select([ - sql`'repost'`.as('type'), - 'repost.uri as uri', - 'repost.cid as cid', - 'post.uri as postUri', - 'repost.creator as originatorDid', - min(dialect, ref('repost.indexedAt'), ref('repost.createdAt')).as( - 'sortAt', - ), - ]), - ) - }) - .onConflict((oc) => oc.doNothing()) - return migrationQb.compile().sql -} - -type Schema = { - feed_item: FeedItem - post: Post - repost: Repost -} - -interface FeedItem { - uri: string - cid: string - type: 'post' | 'repost' - postUri: string - postAuthorDid: string - originatorDid: string - sortAt: string -} - -interface Post { - uri: string - cid: string - creator: string - text: string - replyRoot: string | null - replyRootCid: string | null - replyParent: string | null - replyParentCid: string | null - createdAt: string - indexedAt: string -} - -interface Repost { - uri: string - cid: string - creator: string - subject: string - subjectCid: string - createdAt: string - indexedAt: string -} - -function min(dialect: Dialect, refA: DbRef, refB: DbRef) { - if (dialect === 'pg') { - return sql`least(${refA}, ${refB})` - } else { - return sql`min(${refA}, ${refB})` - } -} - -type DbRef = ReturnType diff --git a/packages/pds/src/db/migrations/20230411T175730759Z-drop-message-queue.ts b/packages/pds/src/db/migrations/20230411T175730759Z-drop-message-queue.ts deleted file mode 100644 index 808d87355c9..00000000000 --- 
a/packages/pds/src/db/migrations/20230411T175730759Z-drop-message-queue.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' - -const messageQueueTable = 'message_queue' -const messageQueueCursorTable = 'message_queue_cursor' - -export async function up(db: Kysely): Promise { - await db.schema.dropTable(messageQueueCursorTable).execute() - await db.schema.dropTable(messageQueueTable).execute() -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - let mqBuilder = db.schema.createTable(messageQueueTable) - mqBuilder = - dialect === 'pg' - ? mqBuilder.addColumn('id', 'serial', (col) => col.primaryKey()) - : mqBuilder.addColumn('id', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - mqBuilder - .addColumn('message', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .execute() - await db.schema - .createTable(messageQueueCursorTable) - .addColumn('consumer', 'varchar', (col) => col.primaryKey()) - .addColumn('cursor', 'integer', (col) => col.notNull()) - .execute() -} diff --git a/packages/pds/src/db/migrations/20230411T180247652Z-labels.ts b/packages/pds/src/db/migrations/20230411T180247652Z-labels.ts deleted file mode 100644 index 86f3b47a721..00000000000 --- a/packages/pds/src/db/migrations/20230411T180247652Z-labels.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('label') - .addColumn('sourceDid', 'varchar', (col) => col.notNull()) - .addColumn('subjectUri', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar') - .addColumn('value', 'varchar', (col) => col.notNull()) - .addColumn('negated', 'int2', (col) => col.notNull()) // @TODO convert to boolean in appview - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('label_pkey', [ - 'sourceDid', - 'subjectUri', - 'subjectCid', - 
'value', - ]) - .execute() - - await db.schema - .createIndex('label_subject_uri_index') - .on('label') - .column('subjectUri') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('label').execute() -} diff --git a/packages/pds/src/db/migrations/20230412T231807162Z-moderation-action-labels.ts b/packages/pds/src/db/migrations/20230412T231807162Z-moderation-action-labels.ts deleted file mode 100644 index 58f015a0c39..00000000000 --- a/packages/pds/src/db/migrations/20230412T231807162Z-moderation-action-labels.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { Kysely } from 'kysely' - -const moderationActionTable = 'moderation_action' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable(moderationActionTable) - .addColumn('createLabelVals', 'varchar') - .execute() - - await db.schema - .alterTable(moderationActionTable) - .addColumn('negateLabelVals', 'varchar') - .execute() - - await db.schema.dropTable('label').execute() - - await db.schema - .createTable('label') - .addColumn('src', 'varchar', (col) => col.notNull()) - .addColumn('uri', 'varchar', (col) => col.notNull()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('val', 'varchar', (col) => col.notNull()) - .addColumn('neg', 'int2', (col) => col.notNull()) // @TODO convert to boolean in appview - .addColumn('cts', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('label_pkey', ['src', 'uri', 'cid', 'val']) - .execute() - - await db.schema - .createIndex('label_uri_index') - .on('label') - .column('uri') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .alterTable(moderationActionTable) - .dropColumn('createLabelVals') - .execute() - - await db.schema - .alterTable(moderationActionTable) - .dropColumn('negateLabelVals') - .execute() - - await db.schema.dropTable('label').execute() - - await db.schema - .createTable('label') - .addColumn('sourceDid', 'varchar', (col) => 
col.notNull()) - .addColumn('subjectUri', 'varchar', (col) => col.notNull()) - .addColumn('subjectCid', 'varchar') - .addColumn('value', 'varchar', (col) => col.notNull()) - .addColumn('negated', 'int2', (col) => col.notNull()) // @TODO convert to boolean in appview - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('label_pkey', [ - 'sourceDid', - 'subjectUri', - 'subjectCid', - 'value', - ]) - .execute() - - await db.schema - .createIndex('label_subject_uri_index') - .on('label') - .column('subjectUri') - .execute() -} diff --git a/packages/pds/src/db/migrations/20230416T221236745Z-app-specific-passwords.ts b/packages/pds/src/db/migrations/20230416T221236745Z-app-specific-passwords.ts deleted file mode 100644 index f518188c844..00000000000 --- a/packages/pds/src/db/migrations/20230416T221236745Z-app-specific-passwords.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('refresh_token') - .addColumn('appPasswordName', 'varchar') - .execute() - - await db.schema - .createTable('app_password') - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('name', 'varchar', (col) => col.notNull()) - .addColumn('passwordScrypt', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('app_password_pkey', ['did', 'name']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('app_password').execute() - - await db.schema - .alterTable('refresh_token') - .dropColumn('appPasswordName') - .execute() -} diff --git a/packages/pds/src/db/migrations/20230420T143821201Z-post-profile-aggs.ts b/packages/pds/src/db/migrations/20230420T143821201Z-post-profile-aggs.ts deleted file mode 100644 index a42346d9201..00000000000 --- a/packages/pds/src/db/migrations/20230420T143821201Z-post-profile-aggs.ts +++ /dev/null @@ -1,178 +0,0 @@ 
-import { Generated, Kysely, sql } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('post_agg') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('likeCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .addColumn('replyCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .addColumn('repostCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .execute() - await db.schema - .createTable('profile_agg') - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('followersCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .addColumn('followsCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .addColumn('postsCount', 'bigint', (col) => col.notNull().defaultTo(0)) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('profile_agg').execute() - await db.schema.dropTable('post_agg').execute() -} - -// This is intentionally not called here, but exists for documentation purposes. -// This query should be safe to be run any time to update the feed_item index. - -// @NOTE these can only update records that do not have a "zero" count, so it's suitable for an initial -// run, but it's not suitable for a general refresh (which may need to update a count down to zero). 
- -// eslint-disable-next-line @typescript-eslint/no-unused-vars -function getAggMigrationSql(db: Kysely) { - const { ref } = db.dynamic - const excluded = (col: string) => ref(`excluded.${col}`) - - const likeCountQb = db - .insertInto('post_agg') - .columns(['uri', 'likeCount']) - .expression((exp) => - exp - .selectFrom('like') - .groupBy('like.subject') - .select(['like.subject as uri', countAll.as('likeCount')]), - ) - .onConflict((oc) => - oc - .column('uri') - .doUpdateSet({ likeCount: sql`${excluded('likeCount')}` }), - ) - - const replyCountQb = db - .insertInto('post_agg') - .columns(['uri', 'replyCount']) - .expression((exp) => - exp - .selectFrom('post') - .where('replyParent', 'is not', null) - .groupBy('post.replyParent') - .select(['post.replyParent as uri', countAll.as('replyCount')]), - ) - .onConflict((oc) => - oc - .column('uri') - .doUpdateSet({ replyCount: sql`${excluded('replyCount')}` }), - ) - - const repostCountQb = db - .insertInto('post_agg') - .columns(['uri', 'repostCount']) - .expression((exp) => - exp - .selectFrom('repost') - .groupBy('repost.subject') - .select(['repost.subject as uri', countAll.as('repostCount')]), - ) - .onConflict((oc) => - oc - .column('uri') - .doUpdateSet({ repostCount: sql`${excluded('repostCount')}` }), - ) - - const followersCountQb = db - .insertInto('profile_agg') - .columns(['did', 'followersCount']) - .expression((exp) => - exp - .selectFrom('follow') - .groupBy('follow.subjectDid') - .select(['follow.subjectDid as did', countAll.as('followersCount')]), - ) - .onConflict((oc) => - oc - .column('did') - .doUpdateSet({ followersCount: sql`${excluded('followersCount')}` }), - ) - - const followsCountQb = db - .insertInto('profile_agg') - .columns(['did', 'followsCount']) - .expression((exp) => - exp - .selectFrom('follow') - .groupBy('follow.creator') - .select(['follow.creator as did', countAll.as('followsCount')]), - ) - .onConflict((oc) => - oc - .column('did') - .doUpdateSet({ followsCount: 
sql`${excluded('followsCount')}` }), - ) - - const postsCountQb = db - .insertInto('profile_agg') - .columns(['did', 'postsCount']) - .expression((exp) => - exp - .selectFrom('post') - .groupBy('post.creator') - .select(['post.creator as did', countAll.as('postsCount')]), - ) - .onConflict((oc) => - oc - .column('did') - .doUpdateSet({ postsCount: sql`${excluded('postsCount')}` }), - ) - - return [ - likeCountQb.compile().sql, - replyCountQb.compile().sql, - repostCountQb.compile().sql, - followersCountQb.compile().sql, - followsCountQb.compile().sql, - postsCountQb.compile().sql, - ].join(';\n\n') -} - -const countAll = sql`count(*)` - -type Schema = { - post_agg: PostAgg - profile_agg: ProfileAgg - like: Like - follow: Follow - post: Post - repost: Repost -} - -interface PostAgg { - uri: string - likeCount: Generated - replyCount: Generated - repostCount: Generated -} - -interface ProfileAgg { - did: string - followersCount: Generated - followsCount: Generated - postsCount: Generated -} - -interface Like { - subject: string -} - -interface Follow { - creator: string - subjectDid: string -} - -interface Post { - creator: string - replyParent: string | null -} - -interface Repost { - subject: string -} diff --git a/packages/pds/src/db/migrations/20230427T194652255Z-notif-record-index.ts b/packages/pds/src/db/migrations/20230427T194652255Z-notif-record-index.ts deleted file mode 100644 index de4031253b2..00000000000 --- a/packages/pds/src/db/migrations/20230427T194652255Z-notif-record-index.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - // Supports record deletion - await db.schema - .createIndex('user_notification_record_idx') - .on('user_notification') - .column('recordUri') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('user_notification_record_idx').execute() -} diff --git 
a/packages/pds/src/db/migrations/20230428T195614638Z-actor-block-init.ts b/packages/pds/src/db/migrations/20230428T195614638Z-actor-block-init.ts deleted file mode 100644 index 1da2ac2595e..00000000000 --- a/packages/pds/src/db/migrations/20230428T195614638Z-actor-block-init.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('actor_block') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('actor_block_unique_subject', [ - 'creator', - 'subjectDid', - ]) - .execute() - await db.schema - .createIndex('actor_block_subjectdid_idx') - .on('actor_block') - .column('subjectDid') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('actor_block_subjectdid_idx').execute() - await db.schema.dropTable('actor_block').execute() -} diff --git a/packages/pds/src/db/migrations/20230508T193807762Z-acct-deletion-indexes.ts b/packages/pds/src/db/migrations/20230508T193807762Z-acct-deletion-indexes.ts deleted file mode 100644 index f1504abe611..00000000000 --- a/packages/pds/src/db/migrations/20230508T193807762Z-acct-deletion-indexes.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' 
- -// Indexes to support efficient account deletion - -export async function up(db: Kysely, dialect: Dialect): Promise { - await db.schema // Also supports record deletes - .createIndex('duplicate_record_duplicate_of_idx') - .on('duplicate_record') - .column('duplicateOf') - .execute() - await db.schema - .createIndex('like_creator_idx') - .on('like') - .column('creator') - .execute() - await db.schema - .createIndex('user_notification_author_idx') - .on('user_notification') - .column('author') - .execute() - if (dialect !== 'sqlite') { - // We want to keep record of the moderations actions even when deleting the underlying repo_blob record. - await db.schema - .alterTable('moderation_action_subject_blob') - .dropConstraint('moderation_action_subject_blob_repo_blob_fkey') - .execute() - } -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect !== 'sqlite') { - await db.schema - .alterTable('moderation_action_subject_blob') - .addForeignKeyConstraint( - 'moderation_action_subject_blob_repo_blob_fkey', - ['cid', 'recordUri'], - 'repo_blob', - ['cid', 'recordUri'], - ) - .execute() - } - await db.schema.dropIndex('user_notification_author_idx').execute() - await db.schema.dropIndex('like_creator_idx').execute() - await db.schema.dropIndex('duplicate_record_duplicate_of_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230508T232711152Z-disable-account-invites.ts b/packages/pds/src/db/migrations/20230508T232711152Z-disable-account-invites.ts deleted file mode 100644 index 254c92eccc9..00000000000 --- a/packages/pds/src/db/migrations/20230508T232711152Z-disable-account-invites.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('user_account') - .addColumn('invitesDisabled', 'int2', (col) => col.notNull().defaultTo(0)) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - 
.alterTable('user_account') - .dropColumn('invitesDisabled') - .execute() -} diff --git a/packages/pds/src/db/migrations/20230509T192324175Z-seq-invalidated.ts b/packages/pds/src/db/migrations/20230509T192324175Z-seq-invalidated.ts deleted file mode 100644 index abdfd025b22..00000000000 --- a/packages/pds/src/db/migrations/20230509T192324175Z-seq-invalidated.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -const repoSeqDidIndex = 'repo_seq_did_index' -const repoSeqEventTypeIndex = 'repo_seq_event_type_index' -const repoSeqSequencedAtIndex = 'repo_seq_sequenced_at_index' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect === 'pg') { - await db.schema - .alterTable('repo_seq') - .dropConstraint('invalidated_by_fkey') - .execute() - await db.schema.alterTable('repo_seq').dropColumn('invalidatedBy').execute() - await db.schema - .alterTable('repo_seq') - .addColumn('invalidated', 'int2', (col) => col.notNull().defaultTo(0)) - .execute() - } else { - await db.schema.dropTable('repo_seq').execute() - await db.schema - .createTable('repo_seq') - .addColumn('seq', 'integer', (col) => col.autoIncrement().primaryKey()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('event', sql`blob`, (col) => col.notNull()) - .addColumn('invalidated', 'int2', (col) => col.notNull().defaultTo(0)) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .execute() - } -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect === 'pg') { - await db.schema.alterTable('repo_seq').dropColumn('invalidated').execute() - await db.schema - .alterTable('repo_seq') - .addColumn('invalidatedBy', 'bigint') - .execute() - await db.schema - .alterTable('repo_seq') - .addForeignKeyConstraint( - 'invalidated_by_fkey', - // @ts-ignore - ['invalidatedBy'], - 'repo_seq', - ['seq'], - ) - .execute() 
- } else { - await db.schema.dropTable('repo_seq').execute() - await db.schema - .createTable('repo_seq') - .addColumn('seq', 'integer', (col) => col.autoIncrement().primaryKey()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('event', sql`blob`, (col) => col.notNull()) - .addColumn('invalidatedBy', 'integer') - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .addForeignKeyConstraint( - 'invalidated_by_fkey', - // @ts-ignore - ['invalidatedBy'], - 'repo_seq', - ['seq'], - ) - .execute() - - // for filtering seqs based on did - await db.schema - .createIndex(repoSeqDidIndex) - .on('repo_seq') - .column('did') - .execute() - - // for filtering seqs based on event type - await db.schema - .createIndex(repoSeqEventTypeIndex) - .on('repo_seq') - .column('eventType') - .execute() - - // for entering into the seq stream at a particular time - await db.schema - .createIndex(repoSeqSequencedAtIndex) - .on('repo_seq') - .column('sequencedAt') - .execute() - } -} diff --git a/packages/pds/src/db/migrations/20230511T154721392Z-mute-lists.ts b/packages/pds/src/db/migrations/20230511T154721392Z-mute-lists.ts deleted file mode 100644 index 20424410ce0..00000000000 --- a/packages/pds/src/db/migrations/20230511T154721392Z-mute-lists.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('list') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('name', 'varchar', (col) => col.notNull()) - .addColumn('purpose', 'varchar', (col) => col.notNull()) - .addColumn('description', 'varchar') - .addColumn('descriptionFacets', 'varchar') - .addColumn('avatarCid', 'varchar') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', 
(col) => col.notNull()) - .execute() - - await db.schema - .createIndex('list_creator_idx') - .on('list') - .column('creator') - .execute() - - await db.schema - .createTable('list_item') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('subjectDid', 'varchar', (col) => col.notNull()) - .addColumn('listUri', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .addUniqueConstraint('list_item_unique_subject_in_list', [ - 'listUri', - 'subjectDid', - ]) - .execute() - - await db.schema - .createIndex('list_item_creator_idx') - .on('list_item') - .column('creator') - .execute() - - await db.schema - .createIndex('list_item_subject_idx') - .on('list_item') - .column('subjectDid') - .execute() - - await db.schema - .createTable('list_mute') - .addColumn('listUri', 'varchar', (col) => col.notNull()) - .addColumn('mutedByDid', 'varchar', (col) => col.notNull()) - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('list_mute_pkey', ['mutedByDid', 'listUri']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('list_creator_idx').execute() - await db.schema.dropIndex('list_item_subject_idx').execute() - await db.schema.dropTable('list').execute() - await db.schema.dropTable('list_item').execute() - await db.schema.dropTable('list_mute').execute() -} diff --git a/packages/pds/src/db/migrations/20230511T171739449Z-actor-preferences.ts b/packages/pds/src/db/migrations/20230511T171739449Z-actor-preferences.ts deleted file mode 100644 index 245118c6364..00000000000 --- a/packages/pds/src/db/migrations/20230511T171739449Z-actor-preferences.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } from '..' 
- -export async function up(db: Kysely, dialect: Dialect): Promise { - let builder = db.schema.createTable('user_pref') - builder = - dialect === 'pg' - ? builder.addColumn('id', 'bigserial', (col) => col.primaryKey()) - : builder.addColumn('id', 'integer', (col) => - col.autoIncrement().primaryKey(), - ) - await builder - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('name', 'varchar', (col) => col.notNull()) - .addColumn('valueJson', 'text', (col) => col.notNull()) - .execute() - await db.schema - .createIndex('user_pref_did_idx') - .on('user_pref') - .column('did') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('user_pref').execute() -} diff --git a/packages/pds/src/db/migrations/20230511T200212974Z-feed-generators.ts b/packages/pds/src/db/migrations/20230511T200212974Z-feed-generators.ts deleted file mode 100644 index 5e8b4ba0965..00000000000 --- a/packages/pds/src/db/migrations/20230511T200212974Z-feed-generators.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('feed_generator') - .addColumn('uri', 'varchar', (col) => col.primaryKey()) - .addColumn('cid', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addColumn('feedDid', 'varchar', (col) => col.notNull()) - .addColumn('displayName', 'varchar') - .addColumn('description', 'varchar') - .addColumn('descriptionFacets', 'varchar') - .addColumn('avatarCid', 'varchar') - .addColumn('createdAt', 'varchar', (col) => col.notNull()) - .addColumn('indexedAt', 'varchar', (col) => col.notNull()) - .execute() - - await db.schema - .createIndex('feed_generator_creator_index') - .on('feed_generator') - .column('creator') - .execute() - - await db.schema - .createIndex('feed_generator_feed_did_index') - .on('feed_generator') - .column('feedDid') - .execute() - - await db.schema - .createTable('did_cache') - 
.addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('doc', 'text', (col) => col.notNull()) - .addColumn('updatedAt', 'bigint', (col) => col.notNull()) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('did_cache').execute() - await db.schema.dropIndex('feed_generator_creator_index').execute() - await db.schema.dropIndex('feed_generator_feed_did_index').execute() - await db.schema.dropTable('feed_generator').execute() -} diff --git a/packages/pds/src/db/migrations/20230523T183902064Z-algo-whats-hot-view.ts b/packages/pds/src/db/migrations/20230523T183902064Z-algo-whats-hot-view.ts deleted file mode 100644 index 8e4b7e3a943..00000000000 --- a/packages/pds/src/db/migrations/20230523T183902064Z-algo-whats-hot-view.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect !== 'pg') return - - const { ref } = db.dynamic - - // materialized views are difficult to change, - // so we parameterize them at runtime with contents of this table. - await db.schema - .createTable('view_param') - .addColumn('name', 'varchar', (col) => col.primaryKey()) - .addColumn('value', 'varchar') - .execute() - - await db - .insertInto('view_param') - .values([ - { name: 'whats_hot_like_threshold', value: '10' }, - { name: 'whats_hot_interval', value: '1day' }, - ]) - .execute() - - // define view query for whats-hot feed - // tldr: scored by like count depreciated over time. 
- - // From: https://medium.com/hacking-and-gonzo/how-hacker-news-ranking-algorithm-works-1d9b0cf2c08d - // Score = (P-1) / (T+2)^G - // where, - // P = points of an item (and -1 is to negate submitters vote) - // T = time since submission (in hours) - // G = Gravity, defaults to 1.8 in news.arc - - const likeCount = ref('post_agg.likeCount') - const indexedAt = ref('post.indexedAt') - const computeScore = sql`round(1000000 * (${likeCount} / ((EXTRACT(epoch FROM AGE(now(), ${indexedAt}::timestamp)) / 3600 + 2) ^ 1.8)))` - - const viewQb = db - .selectFrom('post') - .innerJoin('post_agg', 'post_agg.uri', 'post.uri') - .where( - 'post.indexedAt', - '>', - db - .selectFrom('view_param') - .where('name', '=', 'whats_hot_interval') - .select( - sql`to_char(now() - value::interval, 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"')`.as( - 'val', - ), - ), - ) - .where('post.replyParent', 'is', null) - .where( - 'post_agg.likeCount', - '>', - db // helps cull result set that needs to be sorted - .selectFrom('view_param') - .where('name', '=', 'whats_hot_like_threshold') - .select(sql`value::integer`.as('val')), - ) - .select(['post.uri as uri', 'post.cid as cid', computeScore.as('score')]) - - await db.schema - .createView('algo_whats_hot_view') - .materialized() - .as(viewQb) - .execute() - - // unique index required for pg to refresh view w/ "concurrently" param. 
- await db.schema - .createIndex('algo_whats_hot_view_uri_idx') - .on('algo_whats_hot_view') - .column('uri') - .unique() - .execute() - await db.schema - .createIndex('algo_whats_hot_view_cursor_idx') - .on('algo_whats_hot_view') - .columns(['score', 'cid']) - .execute() -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect !== 'pg') return - await db.schema.dropView('algo_whats_hot_view').materialized().execute() - await db.schema.dropTable('view_param').execute() -} diff --git a/packages/pds/src/db/migrations/20230529T222706121Z-suggested-follows.ts b/packages/pds/src/db/migrations/20230529T222706121Z-suggested-follows.ts deleted file mode 100644 index 11d4ce09be6..00000000000 --- a/packages/pds/src/db/migrations/20230529T222706121Z-suggested-follows.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('suggested_follow') - .addColumn('did', 'varchar', (col) => col.primaryKey()) - .addColumn('order', 'integer', (col) => col.notNull()) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('suggested_follow').execute() -} diff --git a/packages/pds/src/db/migrations/20230530T213530067Z-rebase-indices.ts b/packages/pds/src/db/migrations/20230530T213530067Z-rebase-indices.ts deleted file mode 100644 index 299609f5d1b..00000000000 --- a/packages/pds/src/db/migrations/20230530T213530067Z-rebase-indices.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('repo_blob_did_idx') - .on('repo_blob') - .column('did') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('repo_blob_did_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230605T235529700Z-outgoing-repo-seq.ts 
b/packages/pds/src/db/migrations/20230605T235529700Z-outgoing-repo-seq.ts deleted file mode 100644 index cbf0bcd6dac..00000000000 --- a/packages/pds/src/db/migrations/20230605T235529700Z-outgoing-repo-seq.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect === 'sqlite') { - await db.schema.dropTable('repo_seq').execute() - await db.schema - .createTable('repo_seq') - .addColumn('id', 'integer', (col) => col.autoIncrement().primaryKey()) - .addColumn('seq', 'integer', (col) => col.unique()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('event', sql`blob`, (col) => col.notNull()) - .addColumn('invalidated', 'int2', (col) => col.notNull().defaultTo(0)) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .execute() - } else { - await db.schema.alterTable('repo_seq').renameColumn('seq', 'id').execute() - await db.schema - .alterTable('repo_seq') - .addColumn('seq', 'bigint', (col) => col.unique()) - .execute() - } -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect === 'sqlite') { - await db.schema.dropTable('repo_seq').execute() - await db.schema - .createTable('repo_seq') - .addColumn('seq', 'integer', (col) => col.autoIncrement().primaryKey()) - .addColumn('did', 'varchar', (col) => col.notNull()) - .addColumn('eventType', 'varchar', (col) => col.notNull()) - .addColumn('event', sql`blob`, (col) => col.notNull()) - .addColumn('invalidated', 'int2', (col) => col.notNull().defaultTo(0)) - .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) - .execute() - } else { - await db.schema.alterTable('repo_seq').dropColumn('seq').execute() - await db.schema.alterTable('repo_seq').renameColumn('id', 'seq').execute() - } -} diff --git a/packages/pds/src/db/migrations/20230613T164932261Z-init.ts 
b/packages/pds/src/db/migrations/20230613T164932261Z-init.ts new file mode 100644 index 00000000000..a150ff06cf9 --- /dev/null +++ b/packages/pds/src/db/migrations/20230613T164932261Z-init.ts @@ -0,0 +1,375 @@ +import { Kysely, sql } from 'kysely' +import { Dialect } from '..' + +// @TODO make takedownId a varchar w/o fkey? + +export async function up(db: Kysely, dialect: Dialect): Promise { + const binaryDatatype = dialect === 'sqlite' ? 'blob' : sql`bytea` + + await db.schema + .createTable('app_migration') + .addColumn('id', 'varchar', (col) => col.primaryKey()) + .addColumn('success', 'int2', (col) => col.notNull().defaultTo(0)) + .addColumn('completedAt', 'varchar', (col) => col) + .execute() + + await db.schema + .createTable('app_password') + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('name', 'varchar', (col) => col.notNull()) + .addColumn('passwordScrypt', 'varchar', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('app_password_pkey', ['did', 'name']) + .execute() + + await db.schema + .createTable('backlink') + .addColumn('uri', 'varchar', (col) => col.notNull()) + .addColumn('path', 'varchar', (col) => col.notNull()) + .addColumn('linkToUri', 'varchar') + .addColumn('linkToDid', 'varchar') + .addPrimaryKeyConstraint('backlinks_pkey', ['uri', 'path']) + .addCheckConstraint( + 'backlink_link_to_chk', + // Exactly one of linkToUri or linkToDid should be set + sql`("linkToUri" is null and "linkToDid" is not null) or ("linkToUri" is not null and "linkToDid" is null)`, + ) + .execute() + await db.schema + .createIndex('backlink_path_to_uri_idx') + .on('backlink') + .columns(['path', 'linkToUri']) + .execute() + await db.schema + .createIndex('backlink_path_to_did_idx') + .on('backlink') + .columns(['path', 'linkToDid']) + .execute() + + await db.schema + .createTable('blob') + .addColumn('creator', 'varchar', (col) => col.notNull()) + .addColumn('cid', 'varchar', (col) => 
col.notNull()) + .addColumn('mimeType', 'varchar', (col) => col.notNull()) + .addColumn('size', 'integer', (col) => col.notNull()) + .addColumn('tempKey', 'varchar') + .addColumn('width', 'integer') + .addColumn('height', 'integer') + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('blob_pkey', ['creator', 'cid']) + .execute() + + await db.schema + .createTable('delete_account_token') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('token', 'varchar', (col) => col.notNull()) + .addColumn('requestedAt', 'varchar', (col) => col.notNull()) + .execute() + + await db.schema + .createTable('did_cache') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('doc', 'text', (col) => col.notNull()) + .addColumn('updatedAt', 'bigint', (col) => col.notNull()) + .execute() + + await db.schema + .createTable('did_handle') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('handle', 'varchar', (col) => col.unique()) + .execute() + await db.schema + .createIndex(`did_handle_handle_lower_idx`) + .unique() + .on('did_handle') + .expression(sql`lower("handle")`) + .execute() + + await db.schema + .createTable('invite_code') + .addColumn('code', 'varchar', (col) => col.primaryKey()) + .addColumn('availableUses', 'integer', (col) => col.notNull()) + .addColumn('disabled', 'int2', (col) => col.defaultTo(0)) + .addColumn('forUser', 'varchar', (col) => col.notNull()) + .addColumn('createdBy', 'varchar', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .execute() + + await db.schema + .createTable('invite_code_use') + .addColumn('code', 'varchar', (col) => col.notNull()) + .addColumn('usedBy', 'varchar', (col) => col.notNull()) + .addColumn('usedAt', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint(`invite_code_use_pkey`, ['code', 'usedBy']) + .execute() + + await db.schema + .createTable('ipld_block') + .addColumn('creator', 'varchar', (col) 
=> col.notNull()) + .addColumn('cid', 'varchar', (col) => col.notNull()) + .addColumn('size', 'integer', (col) => col.notNull()) + .addColumn('content', binaryDatatype, (col) => col.notNull()) + .addPrimaryKeyConstraint('ipld_block_pkey', ['creator', 'cid']) + .execute() + + const moderationActionBuilder = + dialect === 'pg' + ? db.schema + .createTable('moderation_action') + .addColumn('id', 'serial', (col) => col.primaryKey()) + : db.schema + .createTable('moderation_action') + .addColumn('id', 'integer', (col) => col.autoIncrement().primaryKey()) + await moderationActionBuilder + .addColumn('action', 'varchar', (col) => col.notNull()) + .addColumn('subjectType', 'varchar', (col) => col.notNull()) + .addColumn('subjectDid', 'varchar', (col) => col.notNull()) + .addColumn('subjectUri', 'varchar') + .addColumn('subjectCid', 'varchar') + .addColumn('reason', 'text', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .addColumn('createdBy', 'varchar', (col) => col.notNull()) + .addColumn('reversedAt', 'varchar') + .addColumn('reversedBy', 'varchar') + .addColumn('reversedReason', 'text') + .addColumn('createLabelVals', 'varchar') + .addColumn('negateLabelVals', 'varchar') + .execute() + + await db.schema + .createTable('moderation_action_subject_blob') + .addColumn('actionId', 'integer', (col) => + col.notNull().references('moderation_action.id'), + ) + .addColumn('cid', 'varchar', (col) => col.notNull()) + .addColumn('recordUri', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('moderation_action_subject_blob_pkey', [ + 'actionId', + 'cid', + 'recordUri', + ]) + .execute() + + const moderationReportBuilder = + dialect === 'pg' + ? 
db.schema + .createTable('moderation_report') + .addColumn('id', 'serial', (col) => col.primaryKey()) + : db.schema + .createTable('moderation_report') + .addColumn('id', 'integer', (col) => col.autoIncrement().primaryKey()) + await moderationReportBuilder + .addColumn('subjectType', 'varchar', (col) => col.notNull()) + .addColumn('subjectDid', 'varchar', (col) => col.notNull()) + .addColumn('subjectUri', 'varchar') + .addColumn('subjectCid', 'varchar') + .addColumn('reasonType', 'varchar', (col) => col.notNull()) + .addColumn('reason', 'text') + .addColumn('reportedByDid', 'varchar', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .execute() + + await db.schema + .createTable('moderation_report_resolution') + .addColumn('reportId', 'integer', (col) => + col.notNull().references('moderation_report.id'), + ) + .addColumn('actionId', 'integer', (col) => + col.notNull().references('moderation_action.id'), + ) + .addColumn('createdBy', 'varchar', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('moderation_report_resolution_pkey', [ + 'reportId', + 'actionId', + ]) + .execute() + await db.schema + .createIndex('moderation_report_resolution_action_id_idx') + .on('moderation_report_resolution') + .column('actionId') + .execute() + + await db.schema + .createTable('record') + .addColumn('uri', 'varchar', (col) => col.primaryKey()) + .addColumn('cid', 'varchar', (col) => col.notNull()) + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('collection', 'varchar', (col) => col.notNull()) + .addColumn('rkey', 'varchar', (col) => col.notNull()) + .addColumn('indexedAt', 'varchar', (col) => col.notNull()) + .addColumn('takedownId', 'varchar') + .execute() + await db.schema + .createIndex('record_did_cid_idx') + .on('record') + .columns(['did', 'cid']) + .execute() + await db.schema + .createIndex('record_did_collection_idx') + .on('record') + .columns(['did', 
'collection']) + .execute() + + await db.schema + .createTable('refresh_token') + .addColumn('id', 'varchar', (col) => col.primaryKey()) + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('expiresAt', 'varchar', (col) => col.notNull()) + .addColumn('nextId', 'varchar') + .addColumn('appPasswordName', 'varchar') + .execute() + await db.schema // Aids in refresh token cleanup + .createIndex('refresh_token_did_idx') + .on('refresh_token') + .column('did') + .execute() + + await db.schema + .createTable('repo_blob') + .addColumn('cid', 'varchar', (col) => col.notNull()) + .addColumn('recordUri', 'varchar', (col) => col.notNull()) + .addColumn('commit', 'varchar', (col) => col.notNull()) + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('takedownId', 'varchar') + .addPrimaryKeyConstraint(`repo_blob_pkey`, ['cid', 'recordUri']) + .execute() + await db.schema // supports rebase + .createIndex('repo_blob_did_idx') + .on('repo_blob') + .column('did') + .execute() + + await db.schema + .createTable('repo_commit_block') + .addColumn('creator', 'varchar', (col) => col.notNull()) + .addColumn('commit', 'varchar', (col) => col.notNull()) + .addColumn('block', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('repo_commit_block_pkey', [ + 'creator', + 'commit', + 'block', + ]) + .execute() + + await db.schema + .createTable('repo_commit_history') + .addColumn('creator', 'varchar', (col) => col.notNull()) + .addColumn('commit', 'varchar', (col) => col.notNull()) + .addColumn('prev', 'varchar') + .addPrimaryKeyConstraint('repo_commit_history_pkey', ['creator', 'commit']) + .execute() + + await db.schema + .createTable('repo_root') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('root', 'varchar', (col) => col.notNull()) + .addColumn('indexedAt', 'varchar', (col) => col.notNull()) + .addColumn('takedownId', 'varchar') + .execute() + + // @TODO renamed indexes for consistency + const repoSeqBuilder = + dialect === 
'pg' + ? db.schema + .createTable('repo_seq') + .addColumn('id', 'bigserial', (col) => col.primaryKey()) + .addColumn('seq', 'bigint', (col) => col.unique()) + : db.schema + .createTable('repo_seq') + .addColumn('id', 'integer', (col) => col.autoIncrement().primaryKey()) + .addColumn('seq', 'integer', (col) => col.unique()) + await repoSeqBuilder + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('eventType', 'varchar', (col) => col.notNull()) + .addColumn('event', binaryDatatype, (col) => col.notNull()) + .addColumn('invalidated', 'int2', (col) => col.notNull().defaultTo(0)) + .addColumn('sequencedAt', 'varchar', (col) => col.notNull()) + .execute() + // for filtering seqs based on did + await db.schema + .createIndex('repo_seq_did_idx') + .on('repo_seq') + .column('did') + .execute() + // for filtering seqs based on event type + await db.schema + .createIndex('repo_seq_event_type_idx') + .on('repo_seq') + .column('eventType') + .execute() + // for entering into the seq stream at a particular time + await db.schema + .createIndex('repo_seq_sequenced_at_index') + .on('repo_seq') + .column('sequencedAt') + .execute() + + await db.schema + .createTable('user_account') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('email', 'varchar', (col) => col.notNull()) + .addColumn('passwordScrypt', 'varchar', (col) => col.notNull()) + .addColumn('createdAt', 'varchar', (col) => col.notNull()) + .addColumn('passwordResetToken', 'varchar') + .addColumn('passwordResetGrantedAt', 'varchar') + .addColumn('invitesDisabled', 'int2', (col) => col.notNull().defaultTo(0)) + .execute() + await db.schema + .createIndex(`user_account_email_lower_idx`) + .unique() + .on('user_account') + .expression(sql`lower("email")`) + .execute() + await db.schema + .createIndex('user_account_password_reset_token_idx') + .unique() + .on('user_account') + .column('passwordResetToken') + .execute() + + const userPrefBuilder = + dialect === 'pg' + ? 
db.schema + .createTable('user_pref') + .addColumn('id', 'bigserial', (col) => col.primaryKey()) + : db.schema + .createTable('user_pref') + .addColumn('id', 'integer', (col) => col.autoIncrement().primaryKey()) + await userPrefBuilder + .addColumn('did', 'varchar', (col) => col.notNull()) + .addColumn('name', 'varchar', (col) => col.notNull()) + .addColumn('valueJson', 'text', (col) => col.notNull()) + .execute() + await db.schema + .createIndex('user_pref_did_idx') + .on('user_pref') + .column('did') + .execute() +} + +export async function down(db: Kysely): Promise { + await db.schema.dropTable('user_pref').execute() + await db.schema.dropTable('user_account').execute() + await db.schema.dropTable('repo_seq').execute() + await db.schema.dropTable('repo_root').execute() + await db.schema.dropTable('repo_commit_history').execute() + await db.schema.dropTable('repo_commit_block').execute() + await db.schema.dropTable('repo_blob').execute() + await db.schema.dropTable('refresh_token').execute() + await db.schema.dropTable('record').execute() + await db.schema.dropTable('moderation_report_resolution').execute() + await db.schema.dropTable('moderation_report').execute() + await db.schema.dropTable('moderation_action_subject_blob').execute() + await db.schema.dropTable('moderation_action').execute() + await db.schema.dropTable('ipld_block').execute() + await db.schema.dropTable('invite_code_use').execute() + await db.schema.dropTable('invite_code').execute() + await db.schema.dropTable('did_handle').execute() + await db.schema.dropTable('did_cache').execute() + await db.schema.dropTable('delete_account_token').execute() + await db.schema.dropTable('blob').execute() + await db.schema.dropTable('backlink').execute() + await db.schema.dropTable('app_password').execute() + await db.schema.dropTable('app_migration').execute() +} diff --git a/packages/pds/src/db/migrations/20230703T044601833Z-feed-and-label-indices.ts 
b/packages/pds/src/db/migrations/20230703T044601833Z-feed-and-label-indices.ts deleted file mode 100644 index 530b833c757..00000000000 --- a/packages/pds/src/db/migrations/20230703T044601833Z-feed-and-label-indices.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('label_cts_idx') - .on('label') - .column('cts') - .execute() - await db.schema - .createIndex('feed_item_originator_cursor_idx') - .on('feed_item') - .columns(['originatorDid', 'sortAt', 'cid']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('label_cts_idx').execute() - await db.schema.dropIndex('feed_item_originator_cursor_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230718T170914772Z-sequencer-leader-sequence.ts b/packages/pds/src/db/migrations/20230718T170914772Z-sequencer-leader-sequence.ts deleted file mode 100644 index aae6db339b9..00000000000 --- a/packages/pds/src/db/migrations/20230718T170914772Z-sequencer-leader-sequence.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect === 'sqlite') return - const res = await db - .selectFrom('repo_seq') - .select('seq') - .where('seq', 'is not', null) - .orderBy('seq', 'desc') - .limit(1) - .executeTakeFirst() - const startAt = res?.seq ? 
res.seq + 50000 : 1 - await sql`CREATE SEQUENCE repo_seq_sequence START ${sql.literal( - startAt, - )};`.execute(db) -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect === 'sqlite') return - await sql`DROP SEQUENCE repo_seq_sequence;`.execute(db) -} diff --git a/packages/pds/src/db/migrations/20230727T172043676Z-user-account-cursor-idx.ts b/packages/pds/src/db/migrations/20230727T172043676Z-user-account-cursor-idx.ts deleted file mode 100644 index 15f38eafd65..00000000000 --- a/packages/pds/src/db/migrations/20230727T172043676Z-user-account-cursor-idx.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('user_account_cursor_idx') - .on('user_account') - .columns(['createdAt', 'did']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('user_account_cursor_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230801T141349990Z-invite-note.ts b/packages/pds/src/db/migrations/20230801T141349990Z-invite-note.ts deleted file mode 100644 index c83a3030350..00000000000 --- a/packages/pds/src/db/migrations/20230801T141349990Z-invite-note.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('user_account') - .addColumn('inviteNote', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.alterTable('user_account').dropColumn('inviteNote').execute() -} diff --git a/packages/pds/src/db/migrations/20230801T195109532Z-remove-moderation-fkeys.ts b/packages/pds/src/db/migrations/20230801T195109532Z-remove-moderation-fkeys.ts deleted file mode 100644 index 81fd86d2b74..00000000000 --- a/packages/pds/src/db/migrations/20230801T195109532Z-remove-moderation-fkeys.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { Kysely } from 'kysely' -import { Dialect } 
from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - if (dialect === 'sqlite') { - return - } - await db.schema - .alterTable('repo_root') - .dropConstraint('repo_root_takedown_id_fkey') - .execute() - await db.schema - .alterTable('record') - .dropConstraint('record_takedown_id_fkey') - .execute() - await db.schema - .alterTable('repo_blob') - .dropConstraint('repo_blob_takedown_id_fkey') - .execute() -} - -export async function down( - db: Kysely, - dialect: Dialect, -): Promise { - if (dialect === 'sqlite') { - return - } - await db.schema - .alterTable('repo_root') - .addForeignKeyConstraint( - 'repo_root_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() - await db.schema - .alterTable('record') - .addForeignKeyConstraint( - 'record_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() - await db.schema - .alterTable('repo_blob') - .addForeignKeyConstraint( - 'repo_blob_takedown_id_fkey', - ['takedownId'], - 'moderation_action', - ['id'], - ) - .execute() -} diff --git a/packages/pds/src/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts b/packages/pds/src/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts deleted file mode 100644 index 1cad46fcfb4..00000000000 --- a/packages/pds/src/db/migrations/20230807T035309811Z-feed-item-delete-invite-for-user-idx.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - // supports post deletion - await db.schema - .createIndex('feed_item_post_uri_idx') - .on('feed_item') - .column('postUri') - .execute() - // supports listing user invites - await db.schema - .createIndex('invite_code_for_user_idx') - .on('invite_code') - .column('forUser') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('feed_item_post_uri_idx').execute() - await 
db.schema.dropIndex('invite_code_for_user_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230808T172813122Z-repo-rev.ts b/packages/pds/src/db/migrations/20230808T172813122Z-repo-rev.ts deleted file mode 100644 index e4c17d73291..00000000000 --- a/packages/pds/src/db/migrations/20230808T172813122Z-repo-rev.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema.alterTable('record').addColumn('repoRev', 'varchar').execute() - await db.schema - .createIndex('record_repo_rev_idx') - .on('record') - .columns(['did', 'repoRev']) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('record_repo_rev_idx').execute() - await db.schema.alterTable('record').dropColumn('repoRev').execute() -} diff --git a/packages/pds/src/db/migrations/20230810T203412859Z-action-duration.ts b/packages/pds/src/db/migrations/20230810T203412859Z-action-duration.ts deleted file mode 100644 index 0530d4d74fd..00000000000 --- a/packages/pds/src/db/migrations/20230810T203412859Z-action-duration.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .alterTable('moderation_action') - .addColumn('durationInHours', 'integer') - .execute() - await db.schema - .alterTable('moderation_action') - .addColumn('expiresAt', 'varchar') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .alterTable('moderation_action') - .dropColumn('durationInHours') - .execute() - await db.schema - .alterTable('moderation_action') - .dropColumn('expiresAt') - .execute() -} diff --git a/packages/pds/src/db/migrations/20230818T134357818Z-runtime-flags.ts b/packages/pds/src/db/migrations/20230818T134357818Z-runtime-flags.ts deleted file mode 100644 index c93ccd74158..00000000000 --- a/packages/pds/src/db/migrations/20230818T134357818Z-runtime-flags.ts +++ /dev/null @@ -1,13 
+0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createTable('runtime_flag') - .addColumn('name', 'varchar', (col) => col.primaryKey()) - .addColumn('value', 'varchar', (col) => col.notNull()) - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropTable('runtime_flag').execute() -} diff --git a/packages/pds/src/db/migrations/20230824T182048120Z-remove-post-hierarchy.ts b/packages/pds/src/db/migrations/20230824T182048120Z-remove-post-hierarchy.ts deleted file mode 100644 index 8144534af07..00000000000 --- a/packages/pds/src/db/migrations/20230824T182048120Z-remove-post-hierarchy.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { Kysely, sql } from 'kysely' -import { Dialect } from '..' - -export async function up(db: Kysely, dialect: Dialect): Promise { - await db.schema.dropTable('post_hierarchy').execute() - // recreate index that calculates e.g. "replyCount", turning it into a covering index - // for uri so that recursive query for post descendents can use an index-only scan. 
- if (dialect === 'pg') { - await sql`create index "post_replyparent_uri_idx" on "post" ("replyParent") include ("uri")`.execute( - db, - ) - } else { - // in sqlite, just index on uri as well - await db.schema - .createIndex('post_replyparent_uri_idx') - .on('post') - .columns(['replyParent', 'uri']) - .execute() - } - await db.schema.dropIndex('post_replyparent_idx').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('post_hierarchy') - .addColumn('uri', 'varchar', (col) => col.notNull()) - .addColumn('ancestorUri', 'varchar', (col) => col.notNull()) - .addColumn('depth', 'integer', (col) => col.notNull()) - .addPrimaryKeyConstraint('post_hierarchy_pkey', ['uri', 'ancestorUri']) - .execute() - await db.schema - .createIndex('post_hierarchy_ancestoruri_idx') - .on('post_hierarchy') - .column('ancestorUri') - .execute() - await db.schema.dropIndex('post_replyparent_uri_idx').execute() - await db.schema - .createIndex('post_replyparent_idx') - .on('post') - .column('replyParent') - .execute() -} diff --git a/packages/pds/src/db/migrations/20230825T142507884Z-blob-tempkey-idx.ts b/packages/pds/src/db/migrations/20230825T142507884Z-blob-tempkey-idx.ts deleted file mode 100644 index 42cb843661e..00000000000 --- a/packages/pds/src/db/migrations/20230825T142507884Z-blob-tempkey-idx.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema - .createIndex('blob_tempkey_idx') - .on('blob') - .column('tempKey') - .execute() -} - -export async function down(db: Kysely): Promise { - await db.schema.dropIndex('blob_tempkey_idx').execute() -} diff --git a/packages/pds/src/db/migrations/20230828T153013575Z-repo-history-rewrite.ts b/packages/pds/src/db/migrations/20230828T153013575Z-repo-history-rewrite.ts deleted file mode 100644 index 368d7cbdbe5..00000000000 --- a/packages/pds/src/db/migrations/20230828T153013575Z-repo-history-rewrite.ts +++ /dev/null @@ 
-1,62 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema.alterTable('repo_root').addColumn('rev', 'varchar').execute() - await db.schema - .alterTable('ipld_block') - .addColumn('repoRev', 'varchar') - .execute() - await db.schema - .alterTable('repo_blob') - .addColumn('repoRev', 'varchar') - .execute() - await db.schema.alterTable('repo_blob').dropColumn('commit').execute() - - await db.schema - .createIndex('ipld_block_repo_rev_idx') - .on('ipld_block') - .columns(['creator', 'repoRev', 'cid']) - .execute() - - await db.schema - .createIndex('repo_blob_repo_rev_idx') - .on('repo_blob') - .columns(['did', 'repoRev']) - .execute() - - await db.schema.dropTable('repo_commit_history').execute() - await db.schema.dropTable('repo_commit_block').execute() -} - -export async function down(db: Kysely): Promise { - await db.schema - .createTable('repo_commit_block') - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('block', 'varchar', (col) => col.notNull()) - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('repo_commit_block_pkey', [ - 'creator', - 'commit', - 'block', - ]) - .execute() - await db.schema - .createTable('repo_commit_history') - .addColumn('commit', 'varchar', (col) => col.notNull()) - .addColumn('prev', 'varchar') - .addColumn('creator', 'varchar', (col) => col.notNull()) - .addPrimaryKeyConstraint('repo_commit_history_pkey', ['creator', 'commit']) - .execute() - - await db.schema.dropIndex('ipld_block_repo_rev_idx').execute() - - await db.schema.dropIndex('repo_blob_repo_rev_idx').execute() - - await db.schema.alterTable('repo_root').dropColumn('rev').execute() - await db.schema.alterTable('ipld_block').dropColumn('repoRev').execute() - await db.schema.alterTable('repo_blob').dropColumn('repoRev').execute() - await db.schema - .alterTable('repo_blob') - .addColumn('commit', 'varchar') - .execute() -} diff --git 
a/packages/pds/src/db/migrations/20230914T014727199Z-repo-v3.ts b/packages/pds/src/db/migrations/20230914T014727199Z-repo-v3.ts new file mode 100644 index 00000000000..cd9569bc33c --- /dev/null +++ b/packages/pds/src/db/migrations/20230914T014727199Z-repo-v3.ts @@ -0,0 +1,165 @@ +import { Kysely, sql } from 'kysely' +import { Dialect } from '..' + +export async function up(db: Kysely, dialect: Dialect): Promise { + // sequencer leader sequence + if (dialect !== 'sqlite') { + const res = await db + .selectFrom('repo_seq') + .select('seq') + .where('seq', 'is not', null) + .orderBy('seq', 'desc') + .limit(1) + .executeTakeFirst() + const startAt = res?.seq ? res.seq + 50000 : 1 + await sql`CREATE SEQUENCE repo_seq_sequence START ${sql.literal( + startAt, + )};`.execute(db) + } + + // user account cursor idx + await db.schema + .createIndex('user_account_cursor_idx') + .on('user_account') + .columns(['createdAt', 'did']) + .execute() + + // invite note + await db.schema + .alterTable('user_account') + .addColumn('inviteNote', 'varchar') + .execute() + + // listing user invites + await db.schema + .createIndex('invite_code_for_user_idx') + .on('invite_code') + .column('forUser') + .execute() + + // mod action duration + await db.schema + .alterTable('moderation_action') + .addColumn('durationInHours', 'integer') + .execute() + await db.schema + .alterTable('moderation_action') + .addColumn('expiresAt', 'varchar') + .execute() + + // runtime flag + await db.schema + .createTable('runtime_flag') + .addColumn('name', 'varchar', (col) => col.primaryKey()) + .addColumn('value', 'varchar', (col) => col.notNull()) + .execute() + + // blob tempkey idx + await db.schema + .createIndex('blob_tempkey_idx') + .on('blob') + .column('tempKey') + .execute() + + // repo v3 + await db.schema.alterTable('repo_root').addColumn('rev', 'varchar').execute() + await db.schema.alterTable('record').addColumn('repoRev', 'varchar').execute() + await db.schema + .alterTable('ipld_block') + 
.addColumn('repoRev', 'varchar') + .execute() + await db.schema + .alterTable('repo_blob') + .addColumn('repoRev', 'varchar') + .execute() + await db.schema.alterTable('repo_blob').dropColumn('commit').execute() + + await db.schema + .createIndex('record_repo_rev_idx') + .on('record') + .columns(['did', 'repoRev']) + .execute() + + await db.schema + .createIndex('ipld_block_repo_rev_idx') + .on('ipld_block') + .columns(['creator', 'repoRev', 'cid']) + .execute() + + await db.schema + .createIndex('repo_blob_repo_rev_idx') + .on('repo_blob') + .columns(['did', 'repoRev']) + .execute() + + await db.schema.dropTable('repo_commit_history').execute() + await db.schema.dropTable('repo_commit_block').execute() +} + +export async function down( + db: Kysely, + dialect: Dialect, +): Promise { + // repo v3 + await db.schema + .createTable('repo_commit_block') + .addColumn('commit', 'varchar', (col) => col.notNull()) + .addColumn('block', 'varchar', (col) => col.notNull()) + .addColumn('creator', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('repo_commit_block_pkey', [ + 'creator', + 'commit', + 'block', + ]) + .execute() + await db.schema + .createTable('repo_commit_history') + .addColumn('commit', 'varchar', (col) => col.notNull()) + .addColumn('prev', 'varchar') + .addColumn('creator', 'varchar', (col) => col.notNull()) + .addPrimaryKeyConstraint('repo_commit_history_pkey', ['creator', 'commit']) + .execute() + + await db.schema.dropIndex('record_repo_rev_idx').execute() + await db.schema.dropIndex('ipld_block_repo_rev_idx').execute() + await db.schema.dropIndex('repo_blob_repo_rev_idx').execute() + + await db.schema.alterTable('repo_root').dropColumn('rev').execute() + await db.schema.alterTable('record').dropColumn('repoRev').execute() + await db.schema.alterTable('ipld_block').dropColumn('repoRev').execute() + await db.schema.alterTable('repo_blob').dropColumn('repoRev').execute() + await db.schema + .alterTable('repo_blob') + .addColumn('commit', 
'varchar') + .execute() + + // blob tempkey idx + await db.schema.dropIndex('blob_tempkey_idx').execute() + + // runtime flag + await db.schema.dropTable('runtime_flag').execute() + + // mod action duration + await db.schema + .alterTable('moderation_action') + .dropColumn('durationInHours') + .execute() + await db.schema + .alterTable('moderation_action') + .dropColumn('expiresAt') + .execute() + + // listing user invites + await db.schema.dropIndex('invite_code_for_user_idx').execute() + + // invite note + await db.schema.alterTable('user_account').dropColumn('inviteNote').execute() + + // user account cursor idx + await db.schema.dropIndex('user_account_cursor_idx').execute() + + // sequencer leader sequence + if (dialect !== 'sqlite') { + await sql`DROP SEQUENCE repo_seq_sequence;`.execute(db) + } +} diff --git a/packages/pds/src/db/migrations/20230922T033938477Z-remove-appview.ts b/packages/pds/src/db/migrations/20230922T033938477Z-remove-appview.ts deleted file mode 100644 index f66825fa722..00000000000 --- a/packages/pds/src/db/migrations/20230922T033938477Z-remove-appview.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Kysely } from 'kysely' - -export async function up(db: Kysely): Promise { - await db.schema.dropView('algo_whats_hot_view').materialized().execute() - await db.schema.dropTable('actor_block').execute() - await db.schema.dropTable('duplicate_record').execute() - await db.schema.dropTable('feed_generator').execute() - await db.schema.dropTable('feed_item').execute() - await db.schema.dropTable('follow').execute() - await db.schema.dropTable('label').execute() - await db.schema.dropTable('like').execute() - await db.schema.dropTable('list_item').execute() - await db.schema.dropTable('list').execute() - await db.schema.dropTable('post_agg').execute() - await db.schema.dropTable('post_embed_image').execute() - await db.schema.dropTable('post_embed_external').execute() - await db.schema.dropTable('post_embed_record').execute() - await 
db.schema.dropTable('post').execute() - await db.schema.dropTable('profile_agg').execute() - await db.schema.dropTable('profile').execute() - await db.schema.dropTable('repost').execute() - await db.schema.dropTable('subscription').execute() - await db.schema.dropTable('suggested_follow').execute() - await db.schema.dropTable('view_param').execute() -} - -export async function down(_db: Kysely): Promise { - // Migration code -} diff --git a/packages/pds/src/db/migrations/20230926T195532354Z-email-tokens.ts b/packages/pds/src/db/migrations/20230926T195532354Z-email-tokens.ts index ce8e6574731..44cefc18899 100644 --- a/packages/pds/src/db/migrations/20230926T195532354Z-email-tokens.ts +++ b/packages/pds/src/db/migrations/20230926T195532354Z-email-tokens.ts @@ -10,13 +10,30 @@ export async function up(db: Kysely, dialect: Dialect): Promise { .addColumn('token', 'varchar', (col) => col.notNull()) .addColumn('requestedAt', timestamp, (col) => col.notNull()) .addPrimaryKeyConstraint('email_token_pkey', ['purpose', 'did']) - .addUniqueConstraint('email_token_token_unique', ['purpose', 'token']) + .addUniqueConstraint('email_token_purpose_token_unique', [ + 'purpose', + 'token', + ]) .execute() await db.schema .alterTable('user_account') .addColumn('emailConfirmedAt', 'varchar') .execute() + + await db.schema.dropIndex('user_account_password_reset_token_idx').execute() + + await db.schema + .alterTable('user_account') + .dropColumn('passwordResetToken') + .execute() + + await db.schema + .alterTable('user_account') + .dropColumn('passwordResetGrantedAt') + .execute() + + await db.schema.dropTable('delete_account_token').execute() } export async function down(db: Kysely): Promise { @@ -25,4 +42,28 @@ export async function down(db: Kysely): Promise { .alterTable('user_account') .dropColumn('emailConfirmedAt') .execute() + + await db.schema + .createIndex('user_account_password_reset_token_idx') + .unique() + .on('user_account') + .column('passwordResetToken') + .execute() + 
+ await db.schema + .alterTable('user_account') + .addColumn('passwordResetToken', 'varchar') + .execute() + + await db.schema + .alterTable('user_account') + .addColumn('passwordResetGrantedAt', 'varchar') + .execute() + + await db.schema + .createTable('delete_account_token') + .addColumn('did', 'varchar', (col) => col.primaryKey()) + .addColumn('token', 'varchar', (col) => col.notNull()) + .addColumn('requestedAt', 'varchar', (col) => col.notNull()) + .execute() } diff --git a/packages/pds/src/db/migrations/20230929T213219699Z-takedown-id-as-int.ts b/packages/pds/src/db/migrations/20230929T213219699Z-takedown-id-as-int.ts new file mode 100644 index 00000000000..8cacc599c60 --- /dev/null +++ b/packages/pds/src/db/migrations/20230929T213219699Z-takedown-id-as-int.ts @@ -0,0 +1,47 @@ +import { Kysely, sql } from 'kysely' +import { Dialect } from '..' + +export async function up(db: Kysely, dialect: Dialect): Promise { + if (dialect === 'pg') { + await sql` + alter table "repo_root" alter column "takedownId" type integer using "takedownId"::integer; + alter table "repo_blob" alter column "takedownId" type integer using "takedownId"::integer; + alter table "record" alter column "takedownId" type integer using "takedownId"::integer; + `.execute(db) + } else { + await sql`alter table "repo_root" drop column "takedownId"`.execute(db) + await sql`alter table "repo_root" add column "takedownId" integer`.execute( + db, + ) + await sql`alter table "repo_blob" drop column "takedownId"`.execute(db) + await sql`alter table "repo_blob" add column "takedownId" integer`.execute( + db, + ) + await sql`alter table "record" drop column "takedownId"`.execute(db) + await sql`alter table "record" add column "takedownId" integer`.execute(db) + } +} + +export async function down( + db: Kysely, + dialect: Dialect, +): Promise { + if (dialect === 'pg') { + await sql` + alter table "repo_root" alter column "takedownId" type varchar; + alter table "repo_blob" alter column "takedownId" type 
varchar; + alter table "record" alter column "takedownId" type varchar; + `.execute(db) + } else { + await sql`alter table "repo_root" drop column "takedownId"`.execute(db) + await sql`alter table "repo_root" add column "takedownId" varchar`.execute( + db, + ) + await sql`alter table "repo_blob" drop column "takedownId"`.execute(db) + await sql`alter table "repo_blob" add column "takedownId" varchar`.execute( + db, + ) + await sql`alter table "record" drop column "takedownId"`.execute(db) + await sql`alter table "record" add column "takedownId" varchar`.execute(db) + } +} diff --git a/packages/pds/src/db/migrations/index.ts b/packages/pds/src/db/migrations/index.ts index 3636d304e46..9aead0d7012 100644 --- a/packages/pds/src/db/migrations/index.ts +++ b/packages/pds/src/db/migrations/index.ts @@ -2,68 +2,7 @@ // It's important that every migration is exported from here with the proper name. We'd simplify // this with kysely's FileMigrationProvider, but it doesn't play nicely with the build process. 
-export * as _20221021T162202001Z from './20221021T162202001Z-init' -export * as _20221116T234458063Z from './20221116T234458063Z-duplicate-records' -export * as _20221202T212459280Z from './20221202T212459280Z-blobs' -export * as _20221209T210026294Z from './20221209T210026294Z-banners' -export * as _20221212T195416407Z from './20221212T195416407Z-post-media' -export * as _20221215T220356370Z from './20221215T220356370Z-password-reset-otp' -export * as _20221226T213635517Z from './20221226T213635517Z-mute-init' -export * as _20221230T215012029Z from './20221230T215012029Z-moderation-init' -export * as _20230127T215753149Z from './20230127T215753149Z-indexed-at-on-record' -export * as _20230127T224743452Z from './20230127T224743452Z-repo-sync-data-pt1' -export * as _20230201T200606704Z from './20230201T200606704Z-repo-sync-data-pt2' -export * as _20230202T170426672Z from './20230202T170426672Z-user-partitioned-cids' -export * as _20230202T170435937Z from './20230202T170435937Z-delete-account-token' -export * as _20230202T172831900Z from './20230202T172831900Z-moderation-subject-blob' -export * as _20230202T213952826Z from './20230202T213952826Z-repo-seq' -export * as _20230208T081544325Z from './20230208T081544325Z-post-hydrate-indices' -export * as _20230208T222001557Z from './20230208T222001557Z-user-table-did-pkey' -export * as _20230210T210132396Z from './20230210T210132396Z-post-hierarchy' -export * as _20230214T172233550Z from './20230214T172233550Z-embed-records' -export * as _20230301T222603402Z from './20230301T222603402Z-repo-ops' -export * as _20230304T193548198Z from './20230304T193548198Z-pagination-indices' -export * as _20230308T234640077Z from './20230308T234640077Z-record-indexes' -export * as _20230309T012947663Z from './20230309T012947663Z-app-migration' -export * as _20230310T205728933Z from './20230310T205728933Z-subscription-init' -export * as _20230313T232322844Z from './20230313T232322844Z-blob-creator' -export * as _20230314T023842127Z from 
'./20230314T023842127Z-refresh-grace-period' -export * as _20230323T162732466Z from './20230323T162732466Z-remove-scenes' -export * as _20230328T214311000Z from './20230328T214311000Z-remove-declarations-assertions-confirmations' -export * as _20230328T214311001Z from './20230328T214311001Z-votes-to-likes' -export * as _20230328T214311002Z from './20230328T214311002Z-remove-post-entities' -export * as _20230328T214311003Z from './20230328T214311003Z-backlinks' -export * as _20230328T214311004Z from './20230328T214311004Z-profile-display-name-empty' -export * as _20230328T214311005Z from './20230328T214311005Z-rework-seq' -export * as _20230406T185855842Z from './20230406T185855842Z-feed-item-init' -export * as _20230411T175730759Z from './20230411T175730759Z-drop-message-queue' -export * as _20230411T180247652Z from './20230411T180247652Z-labels' -export * as _20230412T231807162Z from './20230412T231807162Z-moderation-action-labels' -export * as _20230416T221236745Z from './20230416T221236745Z-app-specific-passwords' -export * as _20230420T143821201Z from './20230420T143821201Z-post-profile-aggs' -export * as _20230427T194652255Z from './20230427T194652255Z-notif-record-index' -export * as _20230428T195614638Z from './20230428T195614638Z-actor-block-init' -export * as _20230508T193807762Z from './20230508T193807762Z-acct-deletion-indexes' -export * as _20230508T232711152Z from './20230508T232711152Z-disable-account-invites' -export * as _20230509T192324175Z from './20230509T192324175Z-seq-invalidated' -export * as _20230511T154721392Z from './20230511T154721392Z-mute-lists' -export * as _20230511T171739449Z from './20230511T171739449Z-actor-preferences' -export * as _20230511T200212974Z from './20230511T200212974Z-feed-generators' -export * as _20230523T183902064Z from './20230523T183902064Z-algo-whats-hot-view' -export * as _20230529T222706121Z from './20230529T222706121Z-suggested-follows' -export * as _20230530T213530067Z from 
'./20230530T213530067Z-rebase-indices' -export * as _20230605T235529700Z from './20230605T235529700Z-outgoing-repo-seq' -export * as _20230703T044601833Z from './20230703T044601833Z-feed-and-label-indices' -export * as _20230718T170914772Z from './20230718T170914772Z-sequencer-leader-sequence' -export * as _20230727T172043676Z from './20230727T172043676Z-user-account-cursor-idx' -export * as _20230801T141349990Z from './20230801T141349990Z-invite-note' -export * as _20230801T195109532Z from './20230801T195109532Z-remove-moderation-fkeys' -export * as _20230807T035309811Z from './20230807T035309811Z-feed-item-delete-invite-for-user-idx' -export * as _20230808T172813122Z from './20230808T172813122Z-repo-rev' -export * as _20230810T203412859Z from './20230810T203412859Z-action-duration' -export * as _20230818T134357818Z from './20230818T134357818Z-runtime-flags' -export * as _20230824T182048120Z from './20230824T182048120Z-remove-post-hierarchy' -export * as _20230825T142507884Z from './20230825T142507884Z-blob-tempkey-idx' -export * as _20230828T153013575Z from './20230828T153013575Z-repo-history-rewrite' -export * as _20230922T033938477Z from './20230922T033938477Z-remove-appview' +export * as _20230613T164932261Z from './20230613T164932261Z-init' +export * as _20230914T014727199Z from './20230914T014727199Z-repo-v3' export * as _20230926T195532354Z from './20230926T195532354Z-email-tokens' +export * as _20230929T213219699Z from './20230929T213219699Z-takedown-id-as-int' diff --git a/packages/pds/src/db/tables/delete-account-token.ts b/packages/pds/src/db/tables/delete-account-token.ts deleted file mode 100644 index da748c639a7..00000000000 --- a/packages/pds/src/db/tables/delete-account-token.ts +++ /dev/null @@ -1,9 +0,0 @@ -export interface DeleteAccountToken { - did: string - token: string - requestedAt: string -} - -export const tableName = 'delete_account_token' - -export type PartialDB = { [tableName]: DeleteAccountToken } diff --git 
a/packages/pds/src/db/tables/list-mute.ts b/packages/pds/src/db/tables/list-mute.ts deleted file mode 100644 index 6678035805b..00000000000 --- a/packages/pds/src/db/tables/list-mute.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const tableName = 'list_mute' - -export interface ListMute { - listUri: string - mutedByDid: string - createdAt: string -} - -export type PartialDB = { [tableName]: ListMute } diff --git a/packages/pds/src/db/tables/mute.ts b/packages/pds/src/db/tables/mute.ts deleted file mode 100644 index 4790c7aeced..00000000000 --- a/packages/pds/src/db/tables/mute.ts +++ /dev/null @@ -1,9 +0,0 @@ -export interface Mute { - did: string - mutedByDid: string - createdAt: string -} - -export const tableName = 'mute' - -export type PartialDB = { [tableName]: Mute } diff --git a/packages/pds/src/db/tables/user-account.ts b/packages/pds/src/db/tables/user-account.ts index ef9fdbecb3c..808663ca468 100644 --- a/packages/pds/src/db/tables/user-account.ts +++ b/packages/pds/src/db/tables/user-account.ts @@ -6,8 +6,6 @@ export interface UserAccount { passwordScrypt: string createdAt: string emailConfirmedAt: string | null - passwordResetToken: string | null - passwordResetGrantedAt: string | null invitesDisabled: Generated<0 | 1> inviteNote: string | null } diff --git a/packages/pds/src/db/tables/user-notification.ts b/packages/pds/src/db/tables/user-notification.ts deleted file mode 100644 index 547d27678f7..00000000000 --- a/packages/pds/src/db/tables/user-notification.ts +++ /dev/null @@ -1,21 +0,0 @@ -export const tableName = 'user_notification' - -export type NotificationReason = - | 'like' - | 'repost' - | 'follow' - | 'mention' - | 'reply' - | 'quote' - -export interface UserNotification { - userDid: string - recordUri: string - recordCid: string - author: string - reason: NotificationReason - reasonSubject: string | null - indexedAt: string -} - -export type PartialDB = { [tableName]: UserNotification } diff --git a/packages/pds/src/db/tables/user-state.ts 
b/packages/pds/src/db/tables/user-state.ts deleted file mode 100644 index bbd2b0e1e07..00000000000 --- a/packages/pds/src/db/tables/user-state.ts +++ /dev/null @@ -1,8 +0,0 @@ -export interface UserState { - did: string - lastSeenNotifs: string -} - -export const tableName = 'user_state' - -export type PartialDB = { [tableName]: UserState } diff --git a/packages/pds/src/db/views.ts b/packages/pds/src/db/views.ts deleted file mode 100644 index 8bf6c25b72b..00000000000 --- a/packages/pds/src/db/views.ts +++ /dev/null @@ -1,60 +0,0 @@ -import assert from 'assert' -import { wait } from '@atproto/common' -import { Leader } from './leader' -import { dbLogger } from '../logger' -import Database from '.' - -export const VIEW_MAINTAINER_ID = 1010 -const VIEWS = ['algo_whats_hot_view'] - -export class ViewMaintainer { - leader = new Leader(VIEW_MAINTAINER_ID, this.db) - destroyed = false - - // @NOTE the db must be authed as the owner of the materialized view, per postgres. - constructor(public db: Database, public intervalSec = 60) { - assert( - this.db.dialect === 'pg', - 'View maintainer can only be used with postgres', - ) - } - - async run() { - while (!this.destroyed) { - try { - const { ran } = await this.leader.run(async ({ signal }) => { - await this.db.maintainMaterializedViews({ - signal, - views: VIEWS, - intervalSec: this.intervalSec, - }) - }) - if (ran && !this.destroyed) { - throw new Error('View maintainer completed, but should be persistent') - } - } catch (err) { - dbLogger.error( - { - err, - views: VIEWS, - intervalSec: this.intervalSec, - lockId: VIEW_MAINTAINER_ID, - }, - 'view maintainer errored', - ) - } - if (!this.destroyed) { - await wait(10000 + jitter(2000)) - } - } - } - - destroy() { - this.destroyed = true - this.leader.destroy() - } -} - -function jitter(maxMs) { - return Math.round((Math.random() - 0.5) * maxMs * 2) -} diff --git a/packages/pds/src/env.ts b/packages/pds/src/env.ts deleted file mode 100644 index f26b2571b08..00000000000 --- 
a/packages/pds/src/env.ts +++ /dev/null @@ -1,9 +0,0 @@ -// NOTE: this file should be imported first, particularly before `@atproto/common` (for logging), to ensure that environment variables are respected in library code -import dotenv from 'dotenv' - -const env = process.env.ENV -if (env) { - dotenv.config({ path: `./.${env}.env` }) -} else { - dotenv.config() -} diff --git a/packages/pds/src/handle/index.ts b/packages/pds/src/handle/index.ts index deae5409945..ec531fcc80f 100644 --- a/packages/pds/src/handle/index.ts +++ b/packages/pds/src/handle/index.ts @@ -27,11 +27,11 @@ export const normalizeAndValidateHandle = async (opts: { 'InvalidHandle', ) } - if (isServiceDomain(handle, ctx.cfg.availableUserDomains)) { + if (isServiceDomain(handle, ctx.cfg.identity.serviceHandleDomains)) { // verify constraints on a service domain ensureHandleServiceConstraints( handle, - ctx.cfg.availableUserDomains, + ctx.cfg.identity.serviceHandleDomains, allowReserved, ) } else { diff --git a/packages/pds/src/image/index.ts b/packages/pds/src/image/index.ts index ebf47088f6b..4f70321f0ed 100644 --- a/packages/pds/src/image/index.ts +++ b/packages/pds/src/image/index.ts @@ -46,26 +46,13 @@ export async function getInfo(stream: Readable): Promise { return maybeInfo } -export type Options = Dimensions & { - format: 'jpeg' | 'png' - // When 'cover' (default), scale to fill given dimensions, cropping if necessary. - // When 'inside', scale to fit within given dimensions. - fit?: 'cover' | 'inside' - // When false (default), do not scale up. - // When true, scale up to hit dimensions given in options. - // Otherwise, scale up to hit specified min dimensions. 
- min?: Dimensions | boolean - // A number 1-100 - quality?: number -} +export type Dimensions = { height: number; width: number } export type ImageInfo = Dimensions & { size: number mime: `image/${string}` | 'unknown' } -export type Dimensions = { height: number; width: number } - export const formatsToMimes: { [s in keyof sharp.FormatEnum]?: `image/${string}` } = { diff --git a/packages/pds/src/index.ts b/packages/pds/src/index.ts index 687ec9699af..cc9e1555895 100644 --- a/packages/pds/src/index.ts +++ b/packages/pds/src/index.ts @@ -8,12 +8,6 @@ import express from 'express' import cors from 'cors' import http from 'http' import events from 'events' -import { createTransport } from 'nodemailer' -import { Redis } from 'ioredis' -import { AtpAgent } from '@atproto/api' -import * as crypto from '@atproto/crypto' -import { BlobStore } from '@atproto/repo' -import { IdResolver } from '@atproto/identity' import { RateLimiter, RateLimiterCreator, @@ -24,32 +18,20 @@ import { DAY, HOUR, MINUTE } from '@atproto/common' import API from './api' import * as basicRoutes from './basic-routes' import * as wellKnown from './well-known' -import Database from './db' -import { ServerAuth } from './auth' import * as error from './error' -import compression from './util/compression' import { dbLogger, loggerMiddleware, seqLogger } from './logger' -import { ServerConfig } from './config' -import { ServerMailer } from './mailer' -import { ModerationMailer } from './mailer/moderation' +import { ServerConfig, ServerSecrets } from './config' import { createServer } from './lexicon' -import { createServices } from './services' import { createHttpTerminator, HttpTerminator } from 'http-terminator' -import AppContext from './context' -import { Sequencer, SequencerLeader } from './sequencer' -import { BackgroundQueue } from './background' -import DidSqlCache from './did-cache' -import { Crawlers } from './crawlers' -import { getRedisClient } from './redis' -import { RuntimeFlags } from 
'./runtime-flags' +import AppContext, { AppContextOptions } from './context' +import compression from './util/compression' -export type { ServerConfigValues } from './config' -export { ServerConfig } from './config' +export * from './config' export { Database } from './db' -export { ViewMaintainer } from './db/views' export { PeriodicModerationActionReversal } from './db/periodic-moderation-action-reversal' export { DiskBlobStore, MemoryBlobStore } from './storage' export { AppContext } from './context' +export { httpLogger } from './logger' export class PDS { public ctx: AppContext @@ -64,130 +46,46 @@ export class PDS { this.app = opts.app } - static create(opts: { - db: Database - blobstore: BlobStore - repoSigningKey: crypto.Keypair - plcRotationKey: crypto.Keypair - config: ServerConfig - }): PDS { - const { db, blobstore, repoSigningKey, plcRotationKey, config } = opts - const auth = new ServerAuth({ - jwtSecret: config.jwtSecret, - adminPass: config.adminPassword, - moderatorPass: config.moderatorPassword, - triagePass: config.triagePassword, - }) - - const didCache = new DidSqlCache( - db, - config.didCacheStaleTTL, - config.didCacheMaxTTL, - ) - const idResolver = new IdResolver({ - plcUrl: config.didPlcUrl, - didCache, - backupNameservers: config.handleResolveNameservers, - }) - - const sequencer = new Sequencer(db) - const sequencerLeader = config.sequencerLeaderEnabled - ? new SequencerLeader(db, config.sequencerLeaderLockId) - : null - - const serverMailTransport = - config.emailSmtpUrl !== undefined - ? createTransport(config.emailSmtpUrl) - : createTransport({ jsonTransport: true }) - - const moderationMailTransport = - config.moderationEmailSmtpUrl !== undefined - ? 
createTransport(config.moderationEmailSmtpUrl) - : createTransport({ jsonTransport: true }) - - const mailer = new ServerMailer(serverMailTransport, config) - const moderationMailer = new ModerationMailer( - moderationMailTransport, - config, - ) - + static async create( + cfg: ServerConfig, + secrets: ServerSecrets, + overrides?: Partial, + ): Promise { const app = express() app.set('trust proxy', true) app.use(cors()) app.use(loggerMiddleware) app.use(compression()) - const backgroundQueue = new BackgroundQueue(db) - const crawlers = new Crawlers( - config.hostname, - config.crawlersToNotify ?? [], - ) - - const appviewAgent = new AtpAgent({ service: config.bskyAppViewEndpoint }) - - const services = createServices({ - repoSigningKey, - blobstore, - appviewAgent, - appviewDid: config.bskyAppViewDid, - appviewCdnUrlPattern: config.bskyAppViewCdnUrlPattern, - backgroundQueue, - crawlers, - }) - - const runtimeFlags = new RuntimeFlags(db) - - let redisScratch: Redis | undefined = undefined - if (config.redisScratchAddress) { - redisScratch = getRedisClient( - config.redisScratchAddress, - config.redisScratchPassword, - ) - } - - const ctx = new AppContext({ - db, - blobstore, - redisScratch, - repoSigningKey, - plcRotationKey, - idResolver, - didCache, - cfg: config, - auth, - sequencer, - sequencerLeader, - runtimeFlags, - services, - mailer, - moderationMailer, - backgroundQueue, - appviewAgent, - crawlers, - }) + const ctx = await AppContext.fromConfig(cfg, secrets, overrides) const xrpcOpts: XrpcServerOptions = { - validateResponse: config.debugMode, + validateResponse: false, payload: { jsonLimit: 100 * 1024, // 100kb textLimit: 100 * 1024, // 100kb blobLimit: 5 * 1024 * 1024, // 5mb }, } - if (config.rateLimitsEnabled) { + if (cfg.rateLimits.enabled) { + const bypassSecret = cfg.rateLimits.bypassKey + const bypassIps = cfg.rateLimits.bypassIps let rlCreator: RateLimiterCreator - if (redisScratch) { + if (cfg.rateLimits.mode === 'redis') { + if 
(!ctx.redisScratch) { + throw new Error('Redis not set up for ratelimiting mode: `redis`') + } rlCreator = (opts: RateLimiterOpts) => - RateLimiter.redis(redisScratch, { - bypassSecret: config.rateLimitBypassKey, - bypassIps: config.rateLimitBypassIps, + RateLimiter.redis(ctx.redisScratch, { + bypassSecret, + bypassIps, ...opts, }) } else { rlCreator = (opts: RateLimiterOpts) => RateLimiter.memory({ - bypassSecret: config.rateLimitBypassKey, - bypassIps: config.rateLimitBypassIps, + bypassSecret, + bypassIps, ...opts, }) } @@ -224,7 +122,10 @@ export class PDS { app.use(server.xrpc.router) app.use(error.handler) - return new PDS({ ctx, app }) + return new PDS({ + ctx, + app, + }) } async start(): Promise { @@ -263,7 +164,7 @@ export class PDS { await this.ctx.sequencer.start() await this.ctx.db.startListeningToChannels() await this.ctx.runtimeFlags.start() - const server = this.app.listen(this.ctx.cfg.port) + const server = this.app.listen(this.ctx.cfg.service.port) this.server = server this.server.keepAliveTimeout = 90000 this.terminator = createHttpTerminator({ server }) diff --git a/packages/pds/src/mailer/index.ts b/packages/pds/src/mailer/index.ts index 6c77fc8964c..92ce8a88c83 100644 --- a/packages/pds/src/mailer/index.ts +++ b/packages/pds/src/mailer/index.ts @@ -30,10 +30,8 @@ export class ServerMailer { } // The returned config can be used inside email templates. - static getEmailConfig(config: ServerConfig) { - return { - appUrlPasswordReset: config.appUrlPasswordReset, - } + static getEmailConfig(_config: ServerConfig) { + return {} } async sendResetPassword( @@ -74,10 +72,10 @@ export class ServerMailer { }) const res = await this.transporter.sendMail({ ...mailOpts, - from: mailOpts.from ?? this.config.emailNoReplyAddress, + from: mailOpts.from ?? this.config.email?.fromAddress, html, }) - if (!this.config.emailSmtpUrl) { + if (!this.config.email?.smtpUrl) { mailerLogger.debug( 'No SMTP URL has been configured. 
Intended to send email:\n' + JSON.stringify(res, null, 2), diff --git a/packages/pds/src/mailer/moderation.ts b/packages/pds/src/mailer/moderation.ts index 6d77d7853c9..7ddd737169d 100644 --- a/packages/pds/src/mailer/moderation.ts +++ b/packages/pds/src/mailer/moderation.ts @@ -1,5 +1,6 @@ import { Transporter } from 'nodemailer' import Mail from 'nodemailer/lib/mailer' +import { htmlToText } from 'nodemailer-html-to-text' import SMTPTransport from 'nodemailer/lib/smtp-transport' import { ServerConfig } from '../config' import { mailerLogger } from '../logger' @@ -14,16 +15,19 @@ export class ModerationMailer { ) { this.config = config this.transporter = transporter + this.transporter.use('compile', htmlToText()) } async send({ content }: { content: string }, mailOpts: Mail.Options) { - const res = await this.transporter.sendMail({ + const mail = { ...mailOpts, - text: content, - from: this.config.moderationEmailAddress, - }) + html: content, + from: this.config.moderationEmail?.fromAddress, + } + + const res = await this.transporter.sendMail(mail) - if (!this.config.moderationEmailSmtpUrl) { + if (!this.config.moderationEmail?.smtpUrl) { mailerLogger.debug( 'Moderation email auth is not configured. 
Intended to send email:\n' + JSON.stringify(res, null, 2), diff --git a/packages/pds/src/repo/prepare.ts b/packages/pds/src/repo/prepare.ts index 2147ef552b6..88201455300 100644 --- a/packages/pds/src/repo/prepare.ts +++ b/packages/pds/src/repo/prepare.ts @@ -31,6 +31,7 @@ import { Record as PostRecord, isRecord as isPost, } from '../lexicon/types/app/bsky/feed/post' +import { isTag } from '../lexicon/types/app/bsky/richtext/facet' import { isRecord as isList } from '../lexicon/types/app/bsky/graph/list' import { isRecord as isProfile } from '../lexicon/types/app/bsky/actor/profile' import { hasExplicitSlur } from '../handle/explicit-slurs' @@ -300,7 +301,17 @@ function assertNoExplicitSlurs(rkey: string, record: RepoRecord) { toCheck += ' ' + rkey toCheck += ' ' + record.displayName } else if (isPost(record)) { - toCheck += record.tags?.join(' ') + if (record.tags) { + toCheck += record.tags.join(' ') + } + + for (const facet of record.facets || []) { + for (const feat of facet.features) { + if (isTag(feat)) { + toCheck += ' ' + feat.tag + } + } + } } if (hasExplicitSlur(toCheck)) { throw new InvalidRecordError('Unacceptable slur in record') diff --git a/packages/pds/src/services/account/index.ts b/packages/pds/src/services/account/index.ts index 33978be9b8b..9a6910d0e4f 100644 --- a/packages/pds/src/services/account/index.ts +++ b/packages/pds/src/services/account/index.ts @@ -1,4 +1,5 @@ import { sql } from 'kysely' +import { randomStr } from '@atproto/crypto' import { InvalidRequestError } from '@atproto/xrpc-server' import { MINUTE, lessThanAgoMs } from '@atproto/common' import { dbLogger as log } from '../../logger' @@ -8,11 +9,9 @@ import { UserAccountEntry } from '../../db/tables/user-account' import { DidHandle } from '../../db/tables/did-handle' import { RepoRoot } from '../../db/tables/repo-root' import { countAll, notSoftDeletedClause } from '../../db/util' -import { getUserSearchQueryPg, getUserSearchQuerySqlite } from '../util/search' import { 
paginate, TimeCidKeyset } from '../../db/pagination' import * as sequencer from '../../sequencer' import { AppPassword } from '../../lexicon/types/com/atproto/server/createAppPassword' -import { randomStr } from '@atproto/crypto' import { EmailTokenPurpose } from '../../db/tables/email-token' import { getRandomToken } from '../../api/com/atproto/server/util' @@ -136,22 +135,12 @@ export class AccountService { .onConflict((oc) => oc.doNothing()) .returning('handle') .executeTakeFirst() - const registerUserState = this.db.db - .insertInto('user_state') - .values({ - did, - lastSeenNotifs: new Date().toISOString(), - }) - .onConflict((oc) => oc.doNothing()) - .returning('did') - .executeTakeFirst() - const [res1, res2, res3] = await Promise.all([ + const [res1, res2] = await Promise.all([ registerUserAccnt, registerDidHandle, - registerUserState, ]) - if (!res1 || !res2 || !res3) { + if (!res1 || !res2) { throw new UserAlreadyExistsError() } log.info({ handle, email, did }, 'registered user') @@ -287,105 +276,45 @@ export class AccountService { .execute() } - async mute(info: { did: string; mutedByDid: string; createdAt?: Date }) { - const { did, mutedByDid, createdAt = new Date() } = info - await this.db.db - .insertInto('mute') - .values({ - did, - mutedByDid, - createdAt: createdAt.toISOString(), - }) - .onConflict((oc) => oc.doNothing()) - .execute() - } - - async unmute(info: { did: string; mutedByDid: string }) { - const { did, mutedByDid } = info - await this.db.db - .deleteFrom('mute') - .where('did', '=', did) - .where('mutedByDid', '=', mutedByDid) - .execute() - } - - async getMute(mutedBy: string, did: string): Promise { - const mutes = await this.getMutes(mutedBy, [did]) - return mutes[did] ?? 
false - } - - async getMutes( - mutedBy: string, - dids: string[], - ): Promise> { - if (dids.length === 0) return {} - const res = await this.db.db - .selectFrom('mute') - .where('mutedByDid', '=', mutedBy) - .where('did', 'in', dids) - .selectAll() - .execute() - return res.reduce((acc, cur) => { - acc[cur.did] = true - return acc - }, {} as Record) - } - - async muteActorList(info: { - list: string - mutedByDid: string - createdAt?: Date - }) { - const { list, mutedByDid, createdAt = new Date() } = info - await this.db.db - .insertInto('list_mute') - .values({ - listUri: list, - mutedByDid, - createdAt: createdAt.toISOString(), - }) - .onConflict((oc) => oc.doNothing()) - .execute() - } - - async unmuteActorList(info: { list: string; mutedByDid: string }) { - const { list, mutedByDid } = info - await this.db.db - .deleteFrom('list_mute') - .where('listUri', '=', list) - .where('mutedByDid', '=', mutedByDid) - .execute() - } - async search(opts: { - searchField?: 'did' | 'handle' - term: string + query: string limit: number cursor?: string includeSoftDeleted?: boolean - }): Promise<(RepoRoot & DidHandle & { distance: number })[]> { - if (opts.searchField === 'did') { - const didSearchBuilder = this.db.db - .selectFrom('did_handle') - .where('did_handle.did', '=', opts.term) - .innerJoin('repo_root', 'repo_root.did', 'did_handle.did') - .selectAll(['did_handle', 'repo_root']) - .select(sql`0`.as('distance')) - - return await didSearchBuilder.execute() - } + }): Promise<(RepoRoot & DidHandle)[]> { + const { query, limit, cursor, includeSoftDeleted } = opts + const { ref } = this.db.db.dynamic + + const builder = this.db.db + .selectFrom('did_handle') + .innerJoin('repo_root', 'repo_root.did', 'did_handle.did') + .innerJoin('user_account', 'user_account.did', 'did_handle.did') + .if(!includeSoftDeleted, (qb) => + qb.where(notSoftDeletedClause(ref('repo_root'))), + ) + .where((qb) => { + // sqlite doesn't support "ilike", but performs "like" case-insensitively + 
const likeOp = this.db.dialect === 'pg' ? 'ilike' : 'like' + if (query.includes('@')) { + return qb.where('user_account.email', likeOp, `%${query}%`) + } + if (query.startsWith('did:')) { + return qb.where('did_handle.did', '=', query) + } + return qb.where('did_handle.handle', likeOp, `${query}%`) + }) + .selectAll(['did_handle', 'repo_root']) + + const keyset = new ListKeyset( + ref('repo_root.indexedAt'), + ref('did_handle.handle'), + ) - const builder = - this.db.dialect === 'pg' - ? getUserSearchQueryPg(this.db, opts) - .selectAll('did_handle') - .selectAll('repo_root') - : getUserSearchQuerySqlite(this.db, opts) - .selectAll('did_handle') - .selectAll('repo_root') - .select(sql`0`.as('distance')) - - return await builder.execute() + return await paginate(builder, { + limit, + cursor, + keyset, + }).execute() } async list(opts: { @@ -437,10 +366,6 @@ export class AccountService { .deleteFrom('user_account') .where('user_account.did', '=', did) .execute() - await this.db.db - .deleteFrom('user_state') - .where('user_state.did', '=', did) - .execute() await this.db.db .deleteFrom('did_handle') .where('did_handle.did', '=', did) @@ -540,7 +465,11 @@ export class AccountService { await this.db.db .insertInto('email_token') .values({ purpose, did, token, requestedAt: new Date() }) - .onConflict((oc) => oc.columns(['purpose', 'did']).doUpdateSet({ token })) + .onConflict((oc) => + oc + .columns(['purpose', 'did']) + .doUpdateSet({ token, requestedAt: new Date() }), + ) .execute() return token } @@ -564,7 +493,7 @@ export class AccountService { .selectAll() .where('purpose', '=', purpose) .where('did', '=', did) - .where('token', '=', token) + .where('token', '=', token.toUpperCase()) .executeTakeFirst() if (!res) { throw new InvalidRequestError('Token is invalid', 'InvalidToken') @@ -575,13 +504,25 @@ export class AccountService { } } - async getLastSeenNotifs(did: string): Promise { + async assertValidTokenAndFindDid( + purpose: EmailTokenPurpose, + token: string, 
+ expirationLen = 15 * MINUTE, + ): Promise { const res = await this.db.db - .selectFrom('user_state') - .where('did', '=', did) + .selectFrom('email_token') .selectAll() + .where('purpose', '=', purpose) + .where('token', '=', token.toUpperCase()) .executeTakeFirst() - return res?.lastSeenNotifs + if (!res) { + throw new InvalidRequestError('Token is invalid', 'InvalidToken') + } + const expired = !lessThanAgoMs(res.requestedAt, expirationLen) + if (expired) { + throw new InvalidRequestError('Token is expired', 'ExpiredToken') + } + return res.did } async getPreferences( diff --git a/packages/pds/src/services/index.ts b/packages/pds/src/services/index.ts index 3dd376a8dd6..954a5544e6e 100644 --- a/packages/pds/src/services/index.ts +++ b/packages/pds/src/services/index.ts @@ -14,18 +14,20 @@ import { LocalService } from './local' export function createServices(resources: { repoSigningKey: crypto.Keypair blobstore: BlobStore - appviewAgent?: AtpAgent - appviewDid?: string - appviewCdnUrlPattern?: string + pdsHostname: string + appViewAgent?: AtpAgent + appViewDid?: string + appViewCdnUrlPattern?: string backgroundQueue: BackgroundQueue crawlers: Crawlers }): Services { const { repoSigningKey, blobstore, - appviewAgent, - appviewDid, - appviewCdnUrlPattern, + pdsHostname, + appViewAgent, + appViewDid, + appViewCdnUrlPattern, backgroundQueue, crawlers, } = resources @@ -41,9 +43,10 @@ export function createServices(resources: { ), local: LocalService.creator( repoSigningKey, - appviewAgent, - appviewDid, - appviewCdnUrlPattern, + pdsHostname, + appViewAgent, + appViewDid, + appViewCdnUrlPattern, ), moderation: ModerationService.creator(blobstore), } diff --git a/packages/pds/src/services/local/index.ts b/packages/pds/src/services/local/index.ts index 867f3baf4e7..c5cc782357f 100644 --- a/packages/pds/src/services/local/index.ts +++ b/packages/pds/src/services/local/index.ts @@ -39,14 +39,16 @@ export class LocalService { constructor( public db: Database, public 
signingKey: Keypair, - public appviewAgent?: AtpAgent, + public pdsHostname: string, + public appViewAgent?: AtpAgent, public appviewDid?: string, public appviewCdnUrlPattern?: string, ) {} static creator( signingKey: Keypair, - appviewAgent?: AtpAgent, + pdsHostname: string, + appViewAgent?: AtpAgent, appviewDid?: string, appviewCdnUrlPattern?: string, ) { @@ -54,7 +56,8 @@ export class LocalService { new LocalService( db, signingKey, - appviewAgent, + pdsHostname, + appViewAgent, appviewDid, appviewCdnUrlPattern, ) @@ -62,7 +65,7 @@ export class LocalService { getImageUrl(pattern: CommonSignedUris, did: string, cid: string) { if (!this.appviewCdnUrlPattern) { - return '' + return `https://${this.pdsHostname}/xrpc/${ids.ComAtprotoSyncGetBlob}?did=${did}&cid=${cid}` } return util.format(this.appviewCdnUrlPattern, pattern, did, cid) } @@ -255,12 +258,12 @@ export class LocalService { } async formatRecordEmbedInternal(did: string, embed: EmbedRecord) { - if (!this.appviewAgent || !this.appviewDid) { + if (!this.appViewAgent || !this.appviewDid) { return null } const collection = new AtUri(embed.record.uri).collection if (collection === ids.AppBskyFeedPost) { - const res = await this.appviewAgent.api.app.bsky.feed.getPosts( + const res = await this.appViewAgent.api.app.bsky.feed.getPosts( { uris: [embed.record.uri], }, @@ -279,7 +282,7 @@ export class LocalService { indexedAt: post.indexedAt, } } else if (collection === ids.AppBskyFeedGenerator) { - const res = await this.appviewAgent.api.app.bsky.feed.getFeedGenerator( + const res = await this.appViewAgent.api.app.bsky.feed.getFeedGenerator( { feed: embed.record.uri, }, @@ -290,7 +293,7 @@ export class LocalService { ...res.data.view, } } else if (collection === ids.AppBskyGraphList) { - const res = await this.appviewAgent.api.app.bsky.graph.getList( + const res = await this.appViewAgent.api.app.bsky.graph.getList( { list: embed.record.uri, }, diff --git a/packages/pds/src/services/record/index.ts 
b/packages/pds/src/services/record/index.ts index 3cdb7bbc05e..1914d1b8c61 100644 --- a/packages/pds/src/services/record/index.ts +++ b/packages/pds/src/services/record/index.ts @@ -217,10 +217,6 @@ export class RecordService { // Not done in transaction because it would be too long, prone to contention. // Also, this can safely be run multiple times if it fails. await this.db.db.deleteFrom('record').where('did', '=', did).execute() - await this.db.db - .deleteFrom('user_notification') - .where('author', '=', did) - .execute() } async removeBacklinksByUri(uri: AtUri) { diff --git a/packages/pds/src/services/repo/blobs.ts b/packages/pds/src/services/repo/blobs.ts index a366f4def1f..2bedb88ecfd 100644 --- a/packages/pds/src/services/repo/blobs.ts +++ b/packages/pds/src/services/repo/blobs.ts @@ -7,11 +7,11 @@ import { BlobStore, CidSet, WriteOpAction } from '@atproto/repo' import { AtUri } from '@atproto/syntax' import { cloneStream, sha256RawToCid, streamSize } from '@atproto/common' import { InvalidRequestError } from '@atproto/xrpc-server' +import { BlobRef } from '@atproto/lexicon' import { PreparedBlobRef, PreparedWrite } from '../../repo/types' import Database from '../../db' import { Blob as BlobTable } from '../../db/tables/blob' import * as img from '../../image' -import { BlobRef } from '@atproto/lexicon' import { PreparedDelete, PreparedUpdate } from '../../repo' import { BackgroundQueue } from '../../background' diff --git a/packages/pds/src/services/util/search.ts b/packages/pds/src/services/util/search.ts deleted file mode 100644 index 26b3e81bf06..00000000000 --- a/packages/pds/src/services/util/search.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { sql } from 'kysely' -import { InvalidRequestError } from '@atproto/xrpc-server' -import Database from '../../db' -import { notSoftDeletedClause, DbRef } from '../../db/util' -import { GenericKeyset, paginate } from '../../db/pagination' - -// @TODO utilized in both pds and app-view -export const 
getUserSearchQueryPg = ( - db: Database, - opts: { - term: string - limit: number - cursor?: string - includeSoftDeleted?: boolean - invitedBy?: string - }, -) => { - const { ref } = db.db.dynamic - const { term, limit, cursor, includeSoftDeleted } = opts - // Matching user accounts based on handle - const distanceAccount = distance(term, ref('handle')) - const accountsQb = getMatchingAccountsQb(db, { term, includeSoftDeleted }) - return paginate(accountsQb, { - limit, - cursor, - direction: 'asc', - keyset: new SearchKeyset(distanceAccount, ref('handle')), - }) -} - -// Matching user accounts based on handle -const getMatchingAccountsQb = ( - db: Database, - opts: { term: string; includeSoftDeleted?: boolean }, -) => { - const { ref } = db.db.dynamic - const { term, includeSoftDeleted } = opts - const distanceAccount = distance(term, ref('handle')) - return db.db - .selectFrom('did_handle') - .innerJoin('repo_root', 'repo_root.did', 'did_handle.did') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('repo_root'))), - ) - .where(similar(term, ref('handle'))) // Coarse filter engaging trigram index - .select(['did_handle.did as did', distanceAccount.as('distance')]) -} - -export const getUserSearchQuerySqlite = ( - db: Database, - opts: { - term: string - limit: number - cursor?: string - includeSoftDeleted?: boolean - }, -) => { - const { ref } = db.db.dynamic - const { term, limit, cursor, includeSoftDeleted } = opts - - // Take the first three words in the search term. We're going to build a dynamic query - // based on the number of words, so to keep things predictable just ignore words 4 and - // beyond. We also remove the special wildcard characters supported by the LIKE operator, - // since that's where these values are heading. - const safeWords = term - .replace(/[%_]/g, '') - .split(/\s+/) - .filter(Boolean) - .slice(0, 3) - - if (!safeWords.length) { - // Return no results. This could happen with weird input like ' % _ '. 
- return db.db - .selectFrom('did_handle') - .innerJoin('repo_root', 'repo_root.did', 'did_handle.did') - .where(sql`1 = 0`) - } - - // We'll ensure there's a space before each word in both textForMatch and in safeWords, - // so that we can reliably match word prefixes using LIKE operator. - const textForMatch = sql`lower(' ' || ${ref( - 'did_handle.handle', - )} || ' ' || coalesce(${ref('profile.displayName')}, ''))` - - const keyset = new SearchKeyset(sql``, sql``) - const unpackedCursor = keyset.unpackCursor(cursor) - - return db.db - .selectFrom('did_handle') - .innerJoin('repo_root', 'repo_root.did', 'did_handle.did') - .if(!includeSoftDeleted, (qb) => - qb.where(notSoftDeletedClause(ref('repo_root'))), - ) - .where((q) => { - safeWords.forEach((word) => { - // Match word prefixes against contents of handle and displayName - q = q.where(textForMatch, 'like', `% ${word.toLowerCase()}%`) - }) - return q - }) - .if(!!unpackedCursor, (qb) => - unpackedCursor ? qb.where('handle', '>', unpackedCursor.secondary) : qb, - ) - .orderBy('handle') - .limit(limit) -} - -// Remove leading @ in case a handle is input that way -export const cleanTerm = (term: string) => term.trim().replace(/^@/g, '') - -// Uses pg_trgm strict word similarity to check similarity between a search term and a stored value -const distance = (term: string, ref: DbRef) => - sql`(${term} <<-> ${ref})` - -// Can utilize trigram index to match on strict word similarity. -// The word_similarity_threshold is set to .4 (i.e. distance < .6) in db/index.ts. 
-const similar = (term: string, ref: DbRef) => sql`(${term} <% ${ref})` - -type Result = { distance: number; handle: string } -type LabeledResult = { primary: number; secondary: string } -export class SearchKeyset extends GenericKeyset { - labelResult(result: Result) { - return { - primary: result.distance, - secondary: result.handle, - } - } - labeledResultToCursor(labeled: LabeledResult) { - return { - primary: labeled.primary.toString().replace('0.', '.'), - secondary: labeled.secondary, - } - } - cursorToLabeledResult(cursor: { primary: string; secondary: string }) { - const distance = parseFloat(cursor.primary) - if (isNaN(distance)) { - throw new InvalidRequestError('Malformed cursor') - } - return { - primary: distance, - secondary: cursor.secondary, - } - } -} diff --git a/packages/pds/src/sql-repo-storage.ts b/packages/pds/src/sql-repo-storage.ts index 7522e325bfa..13301ae300f 100644 --- a/packages/pds/src/sql-repo-storage.ts +++ b/packages/pds/src/sql-repo-storage.ts @@ -229,12 +229,6 @@ export class SqlRepoStorage extends ReadableBlockstore implements RepoStorage { const res = await this.getBlockRange(since, cursor) await writePromise writePromise = writeRows(res) - for (const row of res) { - await car.put({ - cid: CID.parse(row.cid), - bytes: row.content, - }) - } const lastRow = res.at(-1) if (lastRow && lastRow.repoRev) { cursor = { diff --git a/packages/pds/src/storage/disk-blobstore.ts b/packages/pds/src/storage/disk-blobstore.ts index 57826ba9d50..496e7b42c52 100644 --- a/packages/pds/src/storage/disk-blobstore.ts +++ b/packages/pds/src/storage/disk-blobstore.ts @@ -30,8 +30,7 @@ export class DiskBlobStore implements BlobStore { quarantineLocation?: string, ): Promise { const tmp = tmpLocation || path.join(os.tmpdir(), 'atproto/blobs') - const quarantine = - quarantineLocation || path.join(os.tmpdir(), 'atproto/blobs/quarantine') + const quarantine = quarantineLocation || path.join(location, 'quarantine') await Promise.all([ fs.mkdir(location, { 
recursive: true }), fs.mkdir(tmp, { recursive: true }), diff --git a/packages/pds/src/well-known.ts b/packages/pds/src/well-known.ts index 64474a45303..cc19434e42f 100644 --- a/packages/pds/src/well-known.ts +++ b/packages/pds/src/well-known.ts @@ -6,7 +6,7 @@ export const createRouter = (ctx: AppContext): express.Router => { router.get('/.well-known/atproto-did', async function (req, res) { const handle = req.hostname - const supportedHandle = ctx.cfg.availableUserDomains.some( + const supportedHandle = ctx.cfg.identity.serviceHandleDomains.some( (host) => handle.endsWith(host) || handle === host.slice(1), ) if (!supportedHandle) { diff --git a/packages/pds/tests/_util.ts b/packages/pds/tests/_util.ts index b8135836c15..5624ac9a65a 100644 --- a/packages/pds/tests/_util.ts +++ b/packages/pds/tests/_util.ts @@ -1,190 +1,8 @@ -import { AddressInfo } from 'net' -import os from 'os' -import path from 'path' -import * as crypto from '@atproto/crypto' -import * as plc from '@did-plc/lib' -import { PlcServer, Database as PlcDatabase } from '@did-plc/server' import { AtUri } from '@atproto/syntax' -import { randomStr } from '@atproto/crypto' -import { uniqueLockId } from '@atproto/dev-env' import { CID } from 'multiformats/cid' -import * as uint8arrays from 'uint8arrays' -import { PDS, ServerConfig, Database, MemoryBlobStore } from '../src/index' import { FeedViewPost } from '../src/lexicon/types/app/bsky/feed/defs' -import DiskBlobStore from '../src/storage/disk-blobstore' -import AppContext from '../src/context' -import { DAY, HOUR } from '@atproto/common' import { lexToJson } from '@atproto/lexicon' -const ADMIN_PASSWORD = 'admin-pass' -const MODERATOR_PASSWORD = 'moderator-pass' -const TRIAGE_PASSWORD = 'triage-pass' - -export type CloseFn = () => Promise -export type TestServerInfo = { - url: string - ctx: AppContext - close: CloseFn - processAll: () => Promise -} - -export type TestServerOpts = { - migration?: string -} - -export const runTestServer = async ( - 
params: Partial = {}, - opts: TestServerOpts = {}, -): Promise => { - const repoSigningKey = await crypto.Secp256k1Keypair.create() - const plcRotationKey = await crypto.Secp256k1Keypair.create() - - const dbPostgresUrl = params.dbPostgresUrl || process.env.DB_POSTGRES_URL - const dbPostgresSchema = - params.dbPostgresSchema || process.env.DB_POSTGRES_SCHEMA - // run plc server - - let plcDb - if (dbPostgresUrl !== undefined) { - plcDb = PlcDatabase.postgres({ - url: dbPostgresUrl, - schema: `plc_test_${dbPostgresSchema}`, - }) - await plcDb.migrateToLatestOrThrow() - } else { - plcDb = PlcDatabase.mock() - } - - const plcServer = PlcServer.create({ db: plcDb }) - const plcListener = await plcServer.start() - const plcPort = (plcListener.address() as AddressInfo).port - const plcUrl = `http://localhost:${plcPort}` - - const recoveryKey = (await crypto.Secp256k1Keypair.create()).did() - - const plcClient = new plc.Client(plcUrl) - const serverDid = await plcClient.createDid({ - signingKey: repoSigningKey.did(), - rotationKeys: [recoveryKey, plcRotationKey.did()], - handle: 'localhost', - pds: 'https://pds.public.url', - signer: plcRotationKey, - }) - - const blobstoreLoc = path.join(os.tmpdir(), randomStr(5, 'base32')) - - const cfg = new ServerConfig({ - debugMode: true, - version: '0.0.0', - scheme: 'http', - hostname: 'localhost', - serverDid, - recoveryKey, - adminPassword: ADMIN_PASSWORD, - moderatorPassword: MODERATOR_PASSWORD, - triagePassword: TRIAGE_PASSWORD, - inviteRequired: false, - userInviteInterval: null, - userInviteEpoch: Date.now(), - didPlcUrl: plcUrl, - didCacheMaxTTL: DAY, - didCacheStaleTTL: HOUR, - jwtSecret: 'jwt-secret', - availableUserDomains: ['.test'], - rateLimitsEnabled: false, - appUrlPasswordReset: 'app://forgot-password', - emailNoReplyAddress: 'noreply@blueskyweb.xyz', - publicUrl: 'https://pds.public.url', - dbPostgresUrl: process.env.DB_POSTGRES_URL, - blobstoreLocation: `${blobstoreLoc}/blobs`, - blobstoreTmp: 
`${blobstoreLoc}/tmp`, - maxSubscriptionBuffer: 200, - repoBackfillLimitMs: HOUR, - sequencerLeaderLockId: uniqueLockId(), - bskyAppViewEndpoint: 'http://fake_address.invalid', - bskyAppViewDid: 'did:example:fake', - dbTxLockNonce: await randomStr(32, 'base32'), - ...params, - }) - - const db = - cfg.dbPostgresUrl !== undefined - ? Database.postgres({ - url: cfg.dbPostgresUrl, - schema: cfg.dbPostgresSchema, - txLockNonce: cfg.dbTxLockNonce, - }) - : Database.memory() - - // Separate migration db on postgres in case migration changes some - // connection state that we need in the tests, e.g. "alter database ... set ..." - const migrationDb = - cfg.dbPostgresUrl !== undefined - ? Database.postgres({ - url: cfg.dbPostgresUrl, - schema: cfg.dbPostgresSchema, - txLockNonce: cfg.dbTxLockNonce, - }) - : db - if (opts.migration) { - await migrationDb.migrateToOrThrow(opts.migration) - } else { - await migrationDb.migrateToLatestOrThrow() - } - if (migrationDb !== db) { - await migrationDb.close() - } - - const blobstore = - cfg.blobstoreLocation !== undefined - ? 
await DiskBlobStore.create(cfg.blobstoreLocation, cfg.blobstoreTmp) - : new MemoryBlobStore() - - const pds = PDS.create({ - db, - blobstore, - repoSigningKey, - plcRotationKey, - config: cfg, - }) - const pdsServer = await pds.start() - const pdsPort = (pdsServer.address() as AddressInfo).port - - return { - url: `http://localhost:${pdsPort}`, - ctx: pds.ctx, - close: async () => { - await pds.destroy() - await plcServer.destroy() - }, - processAll: async () => { - await pds.ctx.backgroundQueue.processAll() - }, - } -} - -export const adminAuth = () => { - return basicAuth('admin', ADMIN_PASSWORD) -} - -export const moderatorAuth = () => { - return basicAuth('admin', MODERATOR_PASSWORD) -} - -export const triageAuth = () => { - return basicAuth('admin', TRIAGE_PASSWORD) -} - -const basicAuth = (username: string, password: string) => { - return ( - 'Basic ' + - uint8arrays.toString( - uint8arrays.fromString(`${username}:${password}`, 'utf8'), - 'base64pad', - ) - ) -} - // Swap out identifiers and dates with stable // values for the purpose of snapshot testing export const forSnapshot = (obj: unknown) => { @@ -253,7 +71,7 @@ export const forSnapshot = (obj: unknown) => { const [, did, cid] = match return str.replace(did, take(users, did)).replace(cid, take(cids, cid)) } - if (str.startsWith('pds-public-url-')) { + if (str.startsWith('localhost-')) { return 'invite-code' } if (str.match(/^\d+::pds-public-url-/)) { diff --git a/packages/pds/tests/account-deletion.test.ts b/packages/pds/tests/account-deletion.test.ts index 6f7573bc423..12bdad8875a 100644 --- a/packages/pds/tests/account-deletion.test.ts +++ b/packages/pds/tests/account-deletion.test.ts @@ -1,11 +1,10 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import { once, EventEmitter } from 'events' import { Selectable } from 'kysely' import Mail from 'nodemailer/lib/mailer' import AtpAgent from '@atproto/api' -import { SeedClient } from './seeds/client' import basicSeed from 
'./seeds/basic' import { Database } from '../src' -import * as util from './_util' import { ServerMailer } from '../src/mailer' import { BlobNotFoundError, BlobStore } from '@atproto/repo' import { RepoRoot } from '../src/db/tables/repo-root' @@ -16,12 +15,10 @@ import { Blob } from '../src/db/tables/blob' import { Record } from '../src/db/tables/record' import { RepoSeq } from '../src/db/tables/repo-seq' import { ACKNOWLEDGE } from '../src/lexicon/types/com/atproto/admin/defs' -import { UserState } from '../src/db/tables/user-state' describe('account deletion', () => { - let server: util.TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent - let close: util.CloseFn let sc: SeedClient let mailer: ServerMailer @@ -36,15 +33,14 @@ describe('account deletion', () => { let carol beforeAll(async () => { - server = await util.runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'account_deletion', }) - close = server.close - mailer = server.ctx.mailer - db = server.ctx.db - blobstore = server.ctx.blobstore - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + mailer = network.pds.ctx.mailer + db = network.pds.ctx.db + blobstore = network.pds.ctx.blobstore + agent = new AtpAgent({ service: network.pds.url }) + sc = network.getSeedClient() await basicSeed(sc) carol = sc.accounts[sc.dids.carol] @@ -61,14 +57,11 @@ describe('account deletion', () => { afterAll(async () => { mailer.transporter.sendMail = _origSendMail - if (close) { - await close() - } + await network.close() }) const getMailFrom = async (promise): Promise => { const result = await Promise.all([once(mailCatcher, 'mail'), promise]) - console.log(result) return result[0][0] } @@ -92,7 +85,6 @@ describe('account deletion', () => { return expect(token).toBeDefined() } }) - return it('fails account deletion with a bad token', async () => { const attempt = agent.api.com.atproto.server.deleteAccount({ @@ -126,7 +118,7 @@ describe('account 
deletion', () => { }, { encoding: 'application/json', - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) await agent.api.com.atproto.server.deleteAccount({ @@ -134,7 +126,7 @@ describe('account deletion', () => { did: carol.did, password: carol.password, }) - await server.processAll() // Finish background hard-deletions + await network.processAll() // Finish background hard-deletions }) it('no longer lets the user log in', async () => { @@ -153,9 +145,6 @@ describe('account deletion', () => { expect(updatedDbContents.users).toEqual( initialDbContents.users.filter((row) => row.did !== carol.did), ) - expect(updatedDbContents.userState).toEqual( - initialDbContents.userState.filter((row) => row.did !== carol.did), - ) expect(updatedDbContents.blocks).toEqual( initialDbContents.blocks.filter((row) => row.creator !== carol.did), ) @@ -222,7 +211,6 @@ describe('account deletion', () => { type DbContents = { roots: RepoRoot[] users: Selectable[] - userState: UserState[] blocks: IpldBlock[] seqs: Selectable[] records: Record[] @@ -231,11 +219,10 @@ type DbContents = { } const getDbContents = async (db: Database): Promise => { - const [roots, users, userState, blocks, seqs, records, repoBlobs, blobs] = + const [roots, users, blocks, seqs, records, repoBlobs, blobs] = await Promise.all([ db.db.selectFrom('repo_root').orderBy('did').selectAll().execute(), db.db.selectFrom('user_account').orderBy('did').selectAll().execute(), - db.db.selectFrom('user_state').orderBy('did').selectAll().execute(), db.db .selectFrom('ipld_block') .orderBy('creator') @@ -256,7 +243,6 @@ const getDbContents = async (db: Database): Promise => { return { roots, users, - userState, blocks, seqs, records, diff --git a/packages/pds/tests/account.test.ts b/packages/pds/tests/account.test.ts index 78a769b6e9f..f157380a1c1 100644 --- a/packages/pds/tests/account.test.ts +++ b/packages/pds/tests/account.test.ts @@ -2,9 +2,9 @@ import { once, EventEmitter } from 
'events' import AtpAgent, { ComAtprotoServerResetPassword } from '@atproto/api' import { IdResolver } from '@atproto/identity' import * as crypto from '@atproto/crypto' +import { TestNetworkNoAppView } from '@atproto/dev-env' import Mail from 'nodemailer/lib/mailer' import { AppContext, Database } from '../src' -import * as util from './_util' import { ServerMailer } from '../src/mailer' const email = 'alice@test.com' @@ -14,11 +14,10 @@ const passwordAlt = 'test456' const minsToMs = 60 * 1000 describe('account', () => { - let serverUrl: string + let network: TestNetworkNoAppView let ctx: AppContext let repoSigningKey: string let agent: AtpAgent - let close: util.CloseFn let mailer: ServerMailer let db: Database let idResolver: IdResolver @@ -26,19 +25,19 @@ describe('account', () => { let _origSendMail beforeAll(async () => { - const server = await util.runTestServer({ - termsOfServiceUrl: 'https://example.com/tos', - privacyPolicyUrl: '/privacy-policy', + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'account', + pds: { + termsOfServiceUrl: 'https://example.com/tos', + privacyPolicyUrl: 'https://example.com/privacy-policy', + }, }) - close = server.close - mailer = server.ctx.mailer - db = server.ctx.db - ctx = server.ctx - serverUrl = server.url - repoSigningKey = server.ctx.repoSigningKey.did() - idResolver = new IdResolver({ plcUrl: ctx.cfg.didPlcUrl }) - agent = new AtpAgent({ service: serverUrl }) + mailer = network.pds.ctx.mailer + db = network.pds.ctx.db + ctx = network.pds.ctx + repoSigningKey = network.pds.ctx.repoSigningKey.did() + idResolver = network.pds.ctx.idResolver + agent = network.pds.getClient() // Catch emails for use in tests _origSendMail = mailer.transporter.sendMail @@ -51,9 +50,7 @@ describe('account', () => { afterAll(async () => { mailer.transporter.sendMail = _origSendMail - if (close) { - await close() - } + await network.close() }) it('serves the accounts system config', async () => { @@ -62,7 +59,7 @@ 
describe('account', () => { expect(res.data.availableUserDomains[0]).toBe('.test') expect(typeof res.data.inviteCodeRequired).toBe('boolean') expect(res.data.links?.privacyPolicy).toBe( - 'https://pds.public.url/privacy-policy', + 'https://example.com/privacy-policy', ) expect(res.data.links?.termsOfService).toBe('https://example.com/tos') }) @@ -76,6 +73,28 @@ describe('account', () => { await expect(promise).rejects.toThrow('Input/handle must be a valid handle') }) + describe('email validation', () => { + it('succeeds on allowed emails', async () => { + const promise = agent.api.com.atproto.server.createAccount({ + email: 'ok-email@gmail.com', + handle: 'ok-email.test', + password: 'asdf', + }) + await expect(promise).resolves.toBeTruthy() + }) + + it('fails on disallowed emails', async () => { + const promise = agent.api.com.atproto.server.createAccount({ + email: 'bad-email@disposeamail.com', + handle: 'bad-email.test', + password: 'asdf', + }) + await expect(promise).rejects.toThrow( + 'This email address is not supported, please use a different email.', + ) + }) + }) + let did: string let jwt: string @@ -99,7 +118,7 @@ describe('account', () => { expect(didData.did).toBe(did) expect(didData.handle).toBe(handle) expect(didData.signingKey).toBe(repoSigningKey) - expect(didData.pds).toBe('https://pds.public.url') // Mapped from publicUrl + expect(didData.pds).toBe(network.pds.url) }) it('allows a custom set recovery key', async () => { @@ -115,7 +134,7 @@ describe('account', () => { expect(didData.rotationKeys).toEqual([ recoveryKey, - ctx.cfg.recoveryKey, + ctx.cfg.identity.recoveryDidKey, ctx.plcRotationKey.did(), ]) }) @@ -128,10 +147,10 @@ describe('account', () => { handle, rotationKeys: [ userKey.did(), - ctx.cfg.recoveryKey, + ctx.cfg.identity.recoveryDidKey ?? 
'', ctx.plcRotationKey.did(), ], - pds: ctx.cfg.publicUrl, + pds: network.pds.url, signer: userKey, }) @@ -153,10 +172,10 @@ describe('account', () => { handle: 'byo-did.test', rotationKeys: [ userKey.did(), - ctx.cfg.recoveryKey, + ctx.cfg.identity.recoveryDidKey ?? '', ctx.plcRotationKey.did(), ], - pds: ctx.cfg.publicUrl, + pds: ctx.cfg.service.publicUrl, signer: userKey, } const baseAccntInfo = { @@ -222,7 +241,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -236,7 +255,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -252,7 +271,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) await expect(attemptUpdateMod).rejects.toThrow('Insufficient privileges') @@ -263,7 +282,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) await expect(attemptUpdateTriage).rejects.toThrow('Insufficient privileges') @@ -506,24 +525,23 @@ describe('account', () => { it('allows only unexpired password reset tokens', async () => { await agent.api.com.atproto.server.requestPasswordReset({ email }) - const user = await db.db - .updateTable('user_account') - .where('email', '=', email) + const res = await db.db + .updateTable('email_token') + .where('purpose', '=', 'reset_password') + .where('did', '=', did) .set({ - passwordResetGrantedAt: new Date( - Date.now() - 16 * minsToMs, - ).toISOString(), + requestedAt: new Date(Date.now() - 16 * minsToMs), }) - .returning(['passwordResetToken']) + .returning(['token']) .executeTakeFirst() - if (!user?.passwordResetToken) { + if (!res?.token) { throw new Error('Missing 
reset token') } // Use of expired token fails await expect( agent.api.com.atproto.server.resetPassword({ - token: user.passwordResetToken, + token: res.token, password: passwordAlt, }), ).rejects.toThrow(ComAtprotoServerResetPassword.ExpiredTokenError) diff --git a/packages/pds/tests/__snapshots__/moderation.test.ts.snap b/packages/pds/tests/admin/__snapshots__/moderation.test.ts.snap similarity index 100% rename from packages/pds/tests/__snapshots__/moderation.test.ts.snap rename to packages/pds/tests/admin/__snapshots__/moderation.test.ts.snap diff --git a/packages/pds/tests/admin/get-moderation-action.test.ts b/packages/pds/tests/admin/get-moderation-action.test.ts index 54b86984416..11a64799db3 100644 --- a/packages/pds/tests/admin/get-moderation-action.test.ts +++ b/packages/pds/tests/admin/get-moderation-action.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { FLAG, @@ -7,27 +8,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { runTestServer, forSnapshot, CloseFn, adminAuth } from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get moderation action view', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_moderation_action', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { @@ -79,7 +78,7 @@ describe('pds admin get moderation action view', () => { // id 2 because id 1 is in seed client 
const result = await agent.api.com.atproto.admin.getModerationAction( { id: 2 }, - { headers: { authorization: adminAuth() } }, + { headers: { authorization: network.pds.adminAuth() } }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -88,7 +87,7 @@ describe('pds admin get moderation action view', () => { // id 3 because id 1 is in seed client const result = await agent.api.com.atproto.admin.getModerationAction( { id: 3 }, - { headers: { authorization: adminAuth() } }, + { headers: { authorization: network.pds.adminAuth() } }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -96,7 +95,7 @@ describe('pds admin get moderation action view', () => { it('fails when moderation action does not exist.', async () => { const promise = agent.api.com.atproto.admin.getModerationAction( { id: 100 }, - { headers: { authorization: adminAuth() } }, + { headers: { authorization: network.pds.adminAuth() } }, ) await expect(promise).rejects.toThrow('Action not found') }) diff --git a/packages/pds/tests/admin/get-moderation-actions.test.ts b/packages/pds/tests/admin/get-moderation-actions.test.ts index 1ad5e066c60..01a934c32e0 100644 --- a/packages/pds/tests/admin/get-moderation-actions.test.ts +++ b/packages/pds/tests/admin/get-moderation-actions.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { ACKNOWLEDGE, @@ -8,33 +9,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { - runTestServer, - forSnapshot, - CloseFn, - adminAuth, - paginateAll, -} from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot, paginateAll } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get moderation actions view', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - const server = await runTestServer({ + network = 
await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_moderation_actions', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { @@ -124,7 +117,7 @@ describe('pds admin get moderation actions view', () => { it('gets all moderation actions.', async () => { const result = await agent.api.com.atproto.admin.getModerationActions( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.actions)).toMatchSnapshot() }) @@ -132,7 +125,7 @@ describe('pds admin get moderation actions view', () => { it('gets all moderation actions for a repo.', async () => { const result = await agent.api.com.atproto.admin.getModerationActions( { subject: Object.values(sc.dids)[0] }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.actions)).toMatchSnapshot() }) @@ -140,7 +133,7 @@ describe('pds admin get moderation actions view', () => { it('gets all moderation actions for a record.', async () => { const result = await agent.api.com.atproto.admin.getModerationActions( { subject: Object.values(sc.posts)[0][0].ref.uriStr }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.actions)).toMatchSnapshot() }) @@ -150,7 +143,7 @@ describe('pds admin get moderation actions view', () => { const paginator = async (cursor?: string) => { const res = await agent.api.com.atproto.admin.getModerationActions( { cursor, limit: 3 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) return res.data } @@ -162,7 +155,7 @@ describe('pds admin get moderation actions view', () => { const full = await 
agent.api.com.atproto.admin.getModerationActions( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(full.data.actions.length).toEqual(7) // extra one because of seed client diff --git a/packages/pds/tests/admin/get-moderation-report.test.ts b/packages/pds/tests/admin/get-moderation-report.test.ts index 7d433539b37..714596e352f 100644 --- a/packages/pds/tests/admin/get-moderation-report.test.ts +++ b/packages/pds/tests/admin/get-moderation-report.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { FLAG, @@ -7,27 +8,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { runTestServer, forSnapshot, CloseFn, adminAuth } from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get moderation action view', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_moderation_report', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { @@ -78,7 +77,7 @@ describe('pds admin get moderation action view', () => { it('gets moderation report for a repo.', async () => { const result = await agent.api.com.atproto.admin.getModerationReport( { id: 1 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -86,7 +85,7 @@ describe('pds admin get moderation action view', () => { 
it('gets moderation report for a record.', async () => { const result = await agent.api.com.atproto.admin.getModerationReport( { id: 2 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -94,7 +93,7 @@ describe('pds admin get moderation action view', () => { it('fails when moderation report does not exist.', async () => { const promise = agent.api.com.atproto.admin.getModerationReport( { id: 100 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) await expect(promise).rejects.toThrow('Report not found') }) diff --git a/packages/pds/tests/admin/get-moderation-reports.test.ts b/packages/pds/tests/admin/get-moderation-reports.test.ts index 20f1c97f781..aac3560c048 100644 --- a/packages/pds/tests/admin/get-moderation-reports.test.ts +++ b/packages/pds/tests/admin/get-moderation-reports.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { ACKNOWLEDGE, @@ -8,33 +9,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { - runTestServer, - forSnapshot, - CloseFn, - adminAuth, - paginateAll, -} from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot, paginateAll } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get moderation reports view', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_moderation_reports', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + 
await network.close() }) beforeAll(async () => { @@ -132,7 +125,7 @@ describe('pds admin get moderation reports view', () => { const allReports = await agent.api.com.atproto.admin.getModerationReports( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const ignoreSubjects = getDids(allReports).slice(0, 2) @@ -140,7 +133,7 @@ describe('pds admin get moderation reports view', () => { const filteredReportsByDid = await agent.api.com.atproto.admin.getModerationReports( { ignoreSubjects }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) // Validate that when ignored by DID, all reports for that DID is ignored @@ -159,7 +152,7 @@ describe('pds admin get moderation reports view', () => { { ignoreSubjects: ignoredAtUriSubjects, }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) // Validate that when ignored by at uri, only the reports for that at uri is ignored @@ -176,7 +169,7 @@ describe('pds admin get moderation reports view', () => { it('gets all moderation reports.', async () => { const result = await agent.api.com.atproto.admin.getModerationReports( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.reports)).toMatchSnapshot() }) @@ -184,7 +177,7 @@ describe('pds admin get moderation reports view', () => { it('gets all moderation reports for a repo.', async () => { const result = await agent.api.com.atproto.admin.getModerationReports( { subject: Object.values(sc.dids)[0] }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.reports)).toMatchSnapshot() }) @@ -192,7 +185,7 @@ describe('pds admin get moderation reports view', () => { it('gets all moderation reports for a record.', async () => { const result = await agent.api.com.atproto.admin.getModerationReports( { 
subject: Object.values(sc.posts)[0][0].ref.uriStr }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data.reports)).toMatchSnapshot() }) @@ -200,12 +193,12 @@ describe('pds admin get moderation reports view', () => { it('gets all resolved/unresolved moderation reports.', async () => { const resolved = await agent.api.com.atproto.admin.getModerationReports( { resolved: true }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(resolved.data.reports)).toMatchSnapshot() const unresolved = await agent.api.com.atproto.admin.getModerationReports( { resolved: false }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(unresolved.data.reports)).toMatchSnapshot() }) @@ -221,11 +214,11 @@ describe('pds admin get moderation reports view', () => { ] = await Promise.all([ agent.api.com.atproto.admin.getModerationReports( { reverse: true }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), agent.api.com.atproto.admin.getModerationReports( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), ]) @@ -237,7 +230,7 @@ describe('pds admin get moderation reports view', () => { const reportsWithTakedown = await agent.api.com.atproto.admin.getModerationReports( { actionType: TAKEDOWN }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(reportsWithTakedown.data.reports)).toMatchSnapshot() }) @@ -248,21 +241,21 @@ describe('pds admin get moderation reports view', () => { const [actionedByAdminOne, actionedByAdminTwo] = await Promise.all([ agent.api.com.atproto.admin.getModerationReports( { actionedBy: adminDidOne }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), 
agent.api.com.atproto.admin.getModerationReports( { actionedBy: adminDidTwo }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), ]) const [fullReportOne, fullReportTwo] = await Promise.all([ agent.api.com.atproto.admin.getModerationReport( { id: actionedByAdminOne.data.reports[0].id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), agent.api.com.atproto.admin.getModerationReport( { id: actionedByAdminTwo.data.reports[0].id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ), ]) @@ -281,7 +274,7 @@ describe('pds admin get moderation reports view', () => { const paginator = async (cursor?: string) => { const res = await agent.api.com.atproto.admin.getModerationReports( { cursor, limit: 3 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) return res.data } @@ -293,7 +286,7 @@ describe('pds admin get moderation reports view', () => { const full = await agent.api.com.atproto.admin.getModerationReports( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(full.data.reports.length).toEqual(6) @@ -306,7 +299,7 @@ describe('pds admin get moderation reports view', () => { async (cursor?: string) => { const res = await agent.api.com.atproto.admin.getModerationReports( { cursor, limit: 3, reverse }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) return res.data } @@ -326,7 +319,7 @@ describe('pds admin get moderation reports view', () => { it('filters reports by reporter DID.', async () => { const result = await agent.api.com.atproto.admin.getModerationReports( { reporters: [sc.dids.alice] }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const reporterDidsFromReports = [ diff --git a/packages/pds/tests/admin/get-record.test.ts 
b/packages/pds/tests/admin/get-record.test.ts index 6c38419612e..350709971fc 100644 --- a/packages/pds/tests/admin/get-record.test.ts +++ b/packages/pds/tests/admin/get-record.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { AtUri } from '@atproto/syntax' import { @@ -8,34 +9,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { - runTestServer, - forSnapshot, - CloseFn, - adminAuth, - TestServerInfo, -} from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get record view', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_record', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { @@ -80,7 +72,7 @@ describe('pds admin get record view', () => { it('gets a record by uri, even when taken down.', async () => { const result = await agent.api.com.atproto.admin.getRecord( { uri: sc.posts[sc.dids.alice][0].ref.uriStr }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -91,7 +83,7 @@ describe('pds admin get record view', () => { uri: sc.posts[sc.dids.alice][0].ref.uriStr, cid: sc.posts[sc.dids.alice][0].ref.cidStr, }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -105,7 
+97,7 @@ describe('pds admin get record view', () => { 'badrkey', ).toString(), }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) await expect(promise).rejects.toThrow('Record not found') }) @@ -116,7 +108,7 @@ describe('pds admin get record view', () => { uri: sc.posts[sc.dids.alice][0].ref.uriStr, cid: sc.posts[sc.dids.alice][1].ref.cidStr, // Mismatching cid }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) await expect(promise).rejects.toThrow('Record not found') }) diff --git a/packages/pds/tests/admin/get-repo.test.ts b/packages/pds/tests/admin/get-repo.test.ts index 9cd38ae101f..9467643973e 100644 --- a/packages/pds/tests/admin/get-repo.test.ts +++ b/packages/pds/tests/admin/get-repo.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { ACKNOWLEDGE, @@ -7,36 +8,25 @@ import { REASONOTHER, REASONSPAM, } from '../../src/lexicon/types/com/atproto/moderation/defs' -import { - runTestServer, - forSnapshot, - CloseFn, - adminAuth, - TestServerInfo, - moderatorAuth, - triageAuth, -} from '../_util' -import { SeedClient } from '../seeds/client' +import { forSnapshot } from '../_util' import basicSeed from '../seeds/basic' describe('pds admin get repo view', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_get_repo', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { @@ -77,7 +67,7 @@ describe('pds admin get repo view', () => { it('gets a repo by 
did, even when taken down.', async () => { const result = await agent.api.com.atproto.admin.getRepo( { did: sc.dids.alice }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(forSnapshot(result.data)).toMatchSnapshot() }) @@ -85,15 +75,15 @@ describe('pds admin get repo view', () => { it('does not include account emails for triage mods.', async () => { const { data: admin } = await agent.api.com.atproto.admin.getRepo( { did: sc.dids.bob }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const { data: moderator } = await agent.api.com.atproto.admin.getRepo( { did: sc.dids.bob }, - { headers: { authorization: moderatorAuth() } }, + { headers: network.pds.adminAuthHeaders('moderator') }, ) const { data: triage } = await agent.api.com.atproto.admin.getRepo( { did: sc.dids.bob }, - { headers: { authorization: triageAuth() } }, + { headers: network.pds.adminAuthHeaders('triage') }, ) expect(admin.email).toEqual('bob@test.com') expect(moderator.email).toEqual('bob@test.com') @@ -104,7 +94,7 @@ describe('pds admin get repo view', () => { it('fails when repo does not exist.', async () => { const promise = agent.api.com.atproto.admin.getRepo( { did: 'did:plc:doesnotexist' }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) await expect(promise).rejects.toThrow('Repo not found') }) diff --git a/packages/pds/tests/admin/invites.test.ts b/packages/pds/tests/admin/invites.test.ts index 6cdfdbaba93..4f52400a314 100644 --- a/packages/pds/tests/admin/invites.test.ts +++ b/packages/pds/tests/admin/invites.test.ts @@ -1,30 +1,26 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' -import { - runTestServer, - adminAuth, - moderatorAuth, - TestServerInfo, -} from '../_util' import { randomStr } from '@atproto/crypto' -import { SeedClient } from '../seeds/client' describe('pds admin 
invite views', () => { + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient - let server: TestServerInfo beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_invites', - inviteRequired: true, - userInviteInterval: 1, + pds: { + inviteRequired: true, + inviteInterval: 1, + }, }) - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() }) afterAll(async () => { - await server.close() + await network.close() }) let alice: string @@ -34,7 +30,7 @@ describe('pds admin invite views', () => { beforeAll(async () => { const adminCode = await agent.api.com.atproto.server.createInviteCode( { useCount: 10 }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) await sc.createAccount('alice', { @@ -70,11 +66,11 @@ describe('pds admin invite views', () => { ) await agent.api.com.atproto.server.createInviteCode( { useCount: 5, forAccount: alice }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) await agent.api.com.atproto.admin.disableInviteCodes( { codes: [adminCode.data.code], accounts: [bob] }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) const useCode = async (code: string) => { @@ -94,7 +90,7 @@ describe('pds admin invite views', () => { it('gets a list of invite codes by recency', async () => { const result = await agent.api.com.atproto.admin.getInviteCodes( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) let lastDate = result.data.codes[0].createdAt for (const code of result.data.codes) { @@ -121,15 +117,15 @@ 
describe('pds admin invite views', () => { it('paginates by recency', async () => { const full = await agent.api.com.atproto.admin.getInviteCodes( {}, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const first = await agent.api.com.atproto.admin.getInviteCodes( { limit: 5 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const second = await agent.api.com.atproto.admin.getInviteCodes( { cursor: first.data.cursor }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const combined = [...first.data.codes, ...second.data.codes] expect(combined).toEqual(full.data.codes) @@ -138,7 +134,7 @@ describe('pds admin invite views', () => { it('gets a list of invite codes by usage', async () => { const result = await agent.api.com.atproto.admin.getInviteCodes( { sort: 'usage' }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) let lastUseCount = result.data.codes[0].uses.length for (const code of result.data.codes) { @@ -157,15 +153,15 @@ describe('pds admin invite views', () => { it('paginates by usage', async () => { const full = await agent.api.com.atproto.admin.getInviteCodes( { sort: 'usage' }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const first = await agent.api.com.atproto.admin.getInviteCodes( { sort: 'usage', limit: 5 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const second = await agent.api.com.atproto.admin.getInviteCodes( { sort: 'usage', cursor: first.data.cursor }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const combined = [...first.data.codes, ...second.data.codes] expect(combined).toEqual(full.data.codes) @@ -174,7 +170,7 @@ describe('pds admin invite views', () => { it('filters admin.searchRepos by invitedBy', 
async () => { const searchView = await agent.api.com.atproto.admin.searchRepos( { invitedBy: alice }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(searchView.data.repos.length).toBe(2) expect(searchView.data.repos[0].invitedBy?.available).toBe(1) @@ -186,7 +182,7 @@ describe('pds admin invite views', () => { it('hydrates invites into admin.getRepo', async () => { const aliceView = await agent.api.com.atproto.admin.getRepo( { did: alice }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(aliceView.data.invitedBy?.available).toBe(10) expect(aliceView.data.invitedBy?.uses.length).toBe(3) @@ -199,7 +195,7 @@ describe('pds admin invite views', () => { { codes: ['x'], accounts: [alice] }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) await expect(attemptDisableInvites).rejects.toThrow( @@ -212,7 +208,7 @@ describe('pds admin invite views', () => { { useCount: 5, forAccount: alice }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) await expect(attemptCreateInvite).rejects.toThrow('Insufficient privileges') @@ -222,12 +218,12 @@ describe('pds admin invite views', () => { const reasonForDisabling = 'User is selling invites' await agent.api.com.atproto.admin.disableAccountInvites( { account: carol, note: reasonForDisabling }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) const repoRes = await agent.api.com.atproto.admin.getRepo( { did: carol }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(repoRes.data.invitesDisabled).toBe(true) expect(repoRes.data.inviteNote).toBe(reasonForDisabling) @@ -244,31 +240,31 @@ describe('pds 
admin invite views', () => { const reasonForDisabling = 'User is selling invites' await agent.api.com.atproto.admin.enableAccountInvites( { account: carol, note: reasonForEnabling }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) const afterEnable = await agent.api.com.atproto.admin.getRepo( { did: carol }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(afterEnable.data.invitesDisabled).toBe(false) expect(afterEnable.data.inviteNote).toBe(reasonForEnabling) await agent.api.com.atproto.admin.disableAccountInvites( { account: carol, note: reasonForDisabling }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: network.pds.adminAuthHeaders() }, ) const afterDisable = await agent.api.com.atproto.admin.getRepo( { did: carol }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(afterDisable.data.invitesDisabled).toBe(true) expect(afterDisable.data.inviteNote).toBe(reasonForDisabling) }) it('creates codes in the background but disables them', async () => { - const res = await server.ctx.db.db + const res = await network.pds.ctx.db.db .selectFrom('invite_code') .where('forUser', '=', carol) .selectAll() @@ -282,7 +278,7 @@ describe('pds admin invite views', () => { { account: alice }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) await expect(attempt).rejects.toThrow('Insufficient privileges') @@ -291,12 +287,12 @@ describe('pds admin invite views', () => { it('re-enables an accounts invites', async () => { await agent.api.com.atproto.admin.enableAccountInvites( { account: carol }, - { encoding: 'application/json', headers: { authorization: adminAuth() } }, + { encoding: 'application/json', headers: 
network.pds.adminAuthHeaders() }, ) const repoRes = await agent.api.com.atproto.admin.getRepo( { did: carol }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect(repoRes.data.invitesDisabled).toBe(false) @@ -312,7 +308,7 @@ describe('pds admin invite views', () => { { account: alice }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) await expect(attempt).rejects.toThrow('Insufficient privileges') diff --git a/packages/pds/tests/moderation.test.ts b/packages/pds/tests/admin/moderation.test.ts similarity index 87% rename from packages/pds/tests/moderation.test.ts rename to packages/pds/tests/admin/moderation.test.ts index edbb23c6578..c65812adfed 100644 --- a/packages/pds/tests/moderation.test.ts +++ b/packages/pds/tests/admin/moderation.test.ts @@ -1,47 +1,42 @@ +import { + TestNetworkNoAppView, + ImageRef, + RecordRef, + SeedClient, +} from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { AtUri } from '@atproto/syntax' import { BlobNotFoundError } from '@atproto/repo' -import { - adminAuth, - CloseFn, - forSnapshot, - moderatorAuth, - runTestServer, - TestServerInfo, - triageAuth, -} from './_util' -import { ImageRef, RecordRef, SeedClient } from './seeds/client' -import basicSeed from './seeds/basic' +import { forSnapshot } from '../_util' +import { PeriodicModerationActionReversal } from '../../src/db/periodic-moderation-action-reversal' +import basicSeed from '../seeds/basic' import { ACKNOWLEDGE, + ESCALATE, FLAG, TAKEDOWN, - ESCALATE, -} from '../src/lexicon/types/com/atproto/admin/defs' +} from '../../src/lexicon/types/com/atproto/admin/defs' import { REASONOTHER, REASONSPAM, -} from '../src/lexicon/types/com/atproto/moderation/defs' -import { PeriodicModerationActionReversal } from '../src' +} from '../../src/lexicon/types/com/atproto/moderation/defs' describe('moderation', () => { - let server: 
TestServerInfo - let close: CloseFn + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'moderation', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) describe('reporting', () => { @@ -210,7 +205,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const { data: actionResolvedReports } = @@ -222,7 +217,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -237,7 +232,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -285,8 +280,9 @@ describe('moderation', () => { await agent.api.com.atproto.server.requestAccountDelete(undefined, { headers: sc.getHeaders(deleteme.did), }) - const { token: deletionToken } = await server.ctx.db.db - .selectFrom('delete_account_token') + const { token: deletionToken } = await network.pds.ctx.db.db + .selectFrom('email_token') + .where('purpose', '=', 'delete_account') .where('did', '=', deleteme.did) .selectAll() .executeTakeFirstOrThrow() @@ -295,7 +291,7 @@ describe('moderation', () => { password: 'password', token: deletionToken, }) - await server.processAll() + await network.processAll() // Take action on deleted content const { data: action } = await agent.api.com.atproto.admin.takeModerationAction( @@ -311,7 +307,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: 
network.pds.adminAuthHeaders(), }, ) await agent.api.com.atproto.admin.resolveModerationReports( @@ -322,29 +318,29 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) // Check report and action details const { data: repoDeletionActionDetail } = await agent.api.com.atproto.admin.getModerationAction( { id: action.id - 1 }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const { data: recordActionDetail } = await agent.api.com.atproto.admin.getModerationAction( { id: action.id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const { data: reportADetail } = await agent.api.com.atproto.admin.getModerationReport( { id: reportA.id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) const { data: reportBDetail } = await agent.api.com.atproto.admin.getModerationReport( { id: reportB.id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) expect( forSnapshot({ @@ -363,7 +359,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -396,7 +392,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -408,7 +404,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -425,7 +421,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -462,7 +458,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: 
adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -474,7 +470,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -491,7 +487,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -513,7 +509,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) expect(action1).toEqual( @@ -540,7 +536,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) expect(action2).toEqual( @@ -562,7 +558,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) await agent.api.com.atproto.admin.reverseModerationAction( @@ -573,7 +569,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) }) @@ -594,7 +590,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const flagPromise = agent.api.com.atproto.admin.takeModerationAction( @@ -610,7 +606,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) await expect(flagPromise).rejects.toThrow( @@ -626,7 +622,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const { data: flag } = @@ -643,7 +639,7 @@ describe('moderation', () => { }, { 
encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -656,7 +652,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -675,7 +671,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const flagPromise = agent.api.com.atproto.admin.takeModerationAction( @@ -690,7 +686,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) await expect(flagPromise).rejects.toThrow( @@ -706,7 +702,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const { data: flag } = @@ -722,7 +718,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -735,7 +731,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -759,7 +755,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const flagPromise = agent.api.com.atproto.admin.takeModerationAction( @@ -776,7 +772,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) await expect(flagPromise).rejects.toThrow( @@ -791,7 +787,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) const { data: flag } = @@ 
-809,7 +805,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) @@ -822,7 +818,7 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) }) @@ -841,11 +837,21 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) // cleanup - await reverse(action.id) + await agent.api.com.atproto.admin.reverseModerationAction( + { + id: action.id, + createdBy: 'did:example:admin', + reason: 'Y', + }, + { + encoding: 'application/json', + headers: network.pds.adminAuthHeaders(), + }, + ) }) it('automatically reverses actions marked with duration', async () => { @@ -865,18 +871,20 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), }, ) // In the actual app, this will be instantiated and run on server startup - const periodicReversal = new PeriodicModerationActionReversal(server.ctx) + const periodicReversal = new PeriodicModerationActionReversal( + network.pds.ctx, + ) await periodicReversal.findAndRevertDueActions() const { data: reversedAction } = await agent.api.com.atproto.admin.getModerationAction( { id: action.id }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) // Verify that the automatic reversal is attributed to the original moderator of the temporary action @@ -901,27 +909,13 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), }, ) await expect(attemptTakedownTriage).rejects.toThrow( 'Must be a full moderator to perform an account takedown', ) }) - - async function 
reverse(actionId: number) { - await agent.api.com.atproto.admin.reverseModerationAction( - { - id: actionId, - createdBy: 'did:example:admin', - reason: 'Y', - }, - { - encoding: 'application/json', - headers: { authorization: adminAuth() }, - }, - ) - } }) describe('blob takedown', () => { @@ -946,21 +940,21 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) actionId = takeAction.data.id }) it('removes blob from the store', async () => { - const tryGetBytes = server.ctx.blobstore.getBytes(blob.image.ref) + const tryGetBytes = network.pds.ctx.blobstore.getBytes(blob.image.ref) await expect(tryGetBytes).rejects.toThrow(BlobNotFoundError) }) it('prevents blob from being referenced again.', async () => { const uploaded = await sc.uploadFile( sc.dids.alice, - 'tests/image/fixtures/key-alt.jpg', + 'tests/sample-img/key-alt.jpg', 'image/jpeg', ) expect(uploaded.image.ref.equals(blob.image.ref)).toBeTruthy() @@ -968,6 +962,14 @@ describe('moderation', () => { await expect(referenceBlob).rejects.toThrow('Could not find blob:') }) + it('prevents image blob from being served, even when cached.', async () => { + const attempt = agent.api.com.atproto.sync.getBlob({ + did: sc.dids.carol, + cid: blob.image.ref.toString(), + }) + await expect(attempt).rejects.toThrow('Blob not found') + }) + it('restores blob when action is reversed.', async () => { await agent.api.com.atproto.admin.reverseModerationAction( { @@ -977,13 +979,21 @@ describe('moderation', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) // Can post and reference blob const post = await sc.post(sc.dids.alice, 'pic', [], [blob]) expect(post.images[0].image.ref.equals(blob.image.ref)).toBeTruthy() + + // Can fetch through image server + const res = await agent.api.com.atproto.sync.getBlob({ + did: sc.dids.carol, + cid: 
blob.image.ref.toString(), + }) + + expect(res.data.byteLength).toBeGreaterThan(9000) }) }) }) diff --git a/packages/pds/tests/admin/repo-search.test.ts b/packages/pds/tests/admin/repo-search.test.ts index e3fcdef2d80..b95dde6063d 100644 --- a/packages/pds/tests/admin/repo-search.test.ts +++ b/packages/pds/tests/admin/repo-search.test.ts @@ -1,28 +1,27 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' -import { runTestServer, CloseFn, paginateAll, adminAuth } from '../_util' -import { SeedClient } from '../seeds/client' +import { paginateAll } from '../_util' import usersBulkSeed from '../seeds/users-bulk' describe('pds admin repo search view', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn let sc: SeedClient let headers: { [s: string]: string } beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_admin_repo_search', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await usersBulkSeed(sc) - headers = { authorization: adminAuth() } + headers = network.pds.adminAuthHeaders() }) afterAll(async () => { - await close() + await network.close() }) beforeAll(async () => { diff --git a/packages/pds/tests/app-passwords.test.ts b/packages/pds/tests/app-passwords.test.ts index c67b335bef7..c8e1309dda8 100644 --- a/packages/pds/tests/app-passwords.test.ts +++ b/packages/pds/tests/app-passwords.test.ts @@ -1,20 +1,18 @@ +import { TestNetworkNoAppView } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import * as jwt from 'jsonwebtoken' -import { CloseFn, runTestServer, TestServerInfo } from './_util' describe('app_passwords', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView 
let accntAgent: AtpAgent let appAgent: AtpAgent - let close: CloseFn beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'app_passwords', }) - accntAgent = new AtpAgent({ service: server.url }) - appAgent = new AtpAgent({ service: server.url }) - close = server.close + accntAgent = network.pds.getClient() + appAgent = network.pds.getClient() await accntAgent.createAccount({ handle: 'alice.test', @@ -24,7 +22,7 @@ describe('app_passwords', () => { }) afterAll(async () => { - await close() + await network.close() }) let appPass: string @@ -128,7 +126,7 @@ describe('app_passwords', () => { }) it('no longer allows session creation after revocation', async () => { - const newAgent = new AtpAgent({ service: server.url }) + const newAgent = network.pds.getClient() const attempt = newAgent.login({ identifier: 'alice.test', password: appPass, diff --git a/packages/pds/tests/auth.test.ts b/packages/pds/tests/auth.test.ts index ae78f3d5619..d94eebf17e1 100644 --- a/packages/pds/tests/auth.test.ts +++ b/packages/pds/tests/auth.test.ts @@ -1,26 +1,23 @@ -import AtpAgent from '@atproto/api' import * as jwt from 'jsonwebtoken' +import AtpAgent from '@atproto/api' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import * as CreateSession from '@atproto/api/src/client/types/com/atproto/server/createSession' import * as RefreshSession from '@atproto/api/src/client/types/com/atproto/server/refreshSession' -import { SeedClient } from './seeds/client' -import { adminAuth, CloseFn, runTestServer, TestServerInfo } from './_util' describe('auth', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent - let close: CloseFn beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'auth', }) - agent = new AtpAgent({ 
service: server.url }) - close = server.close + agent = network.pds.getClient() }) afterAll(async () => { - await close() + await network.close() }) const createAccount = async (info) => { @@ -173,7 +170,7 @@ describe('auth', () => { }) it('refresh token is revoked after grace period completes.', async () => { - const { db } = server.ctx + const { db } = network.pds.ctx const account = await createAccount({ handle: 'evan.test', email: 'evan@test.com', @@ -228,7 +225,7 @@ describe('auth', () => { }) it('expired refresh token cannot be used to refresh a session.', async () => { - const { auth } = server.ctx + const { auth } = network.pds.ctx const account = await createAccount({ handle: 'holga.test', email: 'holga@test.com', @@ -258,7 +255,7 @@ describe('auth', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) await expect( @@ -284,7 +281,7 @@ describe('auth', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) await expect(refreshSession(account.refreshJwt)).rejects.toThrow( diff --git a/packages/pds/tests/blob-deletes.test.ts b/packages/pds/tests/blob-deletes.test.ts index aa8122423ba..bf7f36c256c 100644 --- a/packages/pds/tests/blob-deletes.test.ts +++ b/packages/pds/tests/blob-deletes.test.ts @@ -1,12 +1,11 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent, { BlobRef } from '@atproto/api' -import { runTestServer, TestServerInfo } from './_util' import { Database } from '../src' import DiskBlobStore from '../src/storage/disk-blobstore' import { ids } from '../src/lexicon/lexicons' -import { SeedClient } from './seeds/client' describe('blob deletes', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient @@ -17,13 +16,13 @@ describe('blob deletes', () => { let bob: string 
beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'blob_deletes', }) - blobstore = server.ctx.blobstore as DiskBlobStore - db = server.ctx.db - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + blobstore = network.pds.ctx.blobstore as DiskBlobStore + db = network.pds.ctx.db + agent = network.pds.getClient() + sc = network.getSeedClient() await sc.createAccount('alice', { email: 'alice@test.com', handle: 'alice.test', @@ -39,7 +38,7 @@ describe('blob deletes', () => { }) afterAll(async () => { - await server.close() + await network.close() }) const getDbBlobsForDid = (did: string) => { @@ -53,12 +52,12 @@ describe('blob deletes', () => { it('deletes blob when record is deleted', async () => { const img = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) const post = await sc.post(alice, 'test', undefined, [img]) await sc.deletePost(alice, post.ref.uri) - await server.processAll() + await network.processAll() const dbBlobs = await getDbBlobsForDid(alice) expect(dbBlobs.length).toBe(0) @@ -70,17 +69,17 @@ describe('blob deletes', () => { it('deletes blob when blob-ref in record is updated', async () => { const img = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) const img2 = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) await updateProfile(sc, alice, img.image, img.image) await updateProfile(sc, alice, img2.image, img2.image) - await server.processAll() + await network.processAll() const dbBlobs = await getDbBlobsForDid(alice) expect(dbBlobs.length).toBe(1) @@ -99,17 +98,17 @@ describe('blob deletes', () => { it('does not delete blob when blob-ref in record is not updated', async () => { const img = 
await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) const img2 = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) await updateProfile(sc, alice, img.image, img.image) await updateProfile(sc, alice, img.image, img2.image) - await server.processAll() + await network.processAll() const dbBlobs = await getDbBlobsForDid(alice) expect(dbBlobs.length).toBe(2) @@ -125,7 +124,7 @@ describe('blob deletes', () => { it('does not delete blob when blob is reused by another record in same commit', async () => { const img = await sc.uploadFile( alice, - 'tests/image/fixtures/key-portrait-small.jpg', + 'tests/sample-img/key-portrait-small.jpg', 'image/jpeg', ) const post = await sc.post(alice, 'post', undefined, [img]) @@ -160,7 +159,7 @@ describe('blob deletes', () => { }, { encoding: 'application/json', headers: sc.getHeaders(alice) }, ) - await server.processAll() + await network.processAll() const dbBlobs = await getDbBlobsForDid(alice) expect(dbBlobs.length).toBe(1) @@ -172,12 +171,12 @@ describe('blob deletes', () => { it('does not delete blob from blob store if another user is using it', async () => { const imgAlice = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const imgBob = await sc.uploadFile( bob, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const postAlice = await sc.post(alice, 'post', undefined, [imgAlice]) diff --git a/packages/pds/tests/create-post.test.ts b/packages/pds/tests/create-post.test.ts index e2763981fb0..81d813b9aeb 100644 --- a/packages/pds/tests/create-post.test.ts +++ b/packages/pds/tests/create-post.test.ts @@ -1,25 +1,29 @@ -import AtpAgent, { AppBskyFeedPost, AtUri } from '@atproto/api' -import { runTestServer, 
TestServerInfo } from './_util' -import { SeedClient } from './seeds/client' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' +import AtpAgent, { + AppBskyFeedPost, + AtUri, + RichText, + AppBskyRichtextFacet, +} from '@atproto/api' import basicSeed from './seeds/basic' describe('pds posts record creation', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'views_posts', }) - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) - await server.processAll() + await network.processAll() }) afterAll(async () => { - await server.close() + await network.close() }) it('allows for creating posts with tags', async () => { @@ -42,4 +46,32 @@ describe('pds posts record creation', () => { expect(record).toBeTruthy() expect(record.tags).toEqual(['javascript', 'hehe']) }) + + it('handles RichText tag facets as well', async () => { + const rt = new RichText({ text: 'hello #world' }) + await rt.detectFacets(agent) + + const post: AppBskyFeedPost.Record = { + text: rt.text, + facets: rt.facets, + createdAt: new Date().toISOString(), + } + + const res = await agent.api.app.bsky.feed.post.create( + { repo: sc.dids.alice }, + post, + sc.getHeaders(sc.dids.alice), + ) + const { value: record } = await agent.api.app.bsky.feed.post.get({ + repo: sc.dids.alice, + rkey: new AtUri(res.uri).rkey, + }) + + expect(record).toBeTruthy() + expect( + record.facets?.every((f) => { + return AppBskyRichtextFacet.isTag(f.features[0]) + }), + ).toBeTruthy() + }) }) diff --git a/packages/pds/tests/crud.test.ts b/packages/pds/tests/crud.test.ts index c86f750cfac..c0902e2db29 100644 --- a/packages/pds/tests/crud.test.ts +++ b/packages/pds/tests/crud.test.ts @@ -1,6 +1,8 @@ import fs from 
'fs/promises' import { AtUri } from '@atproto/syntax' import AtpAgent from '@atproto/api' +import { BlobRef } from '@atproto/lexicon' +import { TestNetworkNoAppView } from '@atproto/dev-env' import * as createRecord from '@atproto/api/src/client/types/com/atproto/repo/createRecord' import * as putRecord from '@atproto/api/src/client/types/com/atproto/repo/putRecord' import * as deleteRecord from '@atproto/api/src/client/types/com/atproto/repo/deleteRecord' @@ -9,10 +11,9 @@ import { cidForCbor, TID, ui8ToArrayBuffer } from '@atproto/common' import { BlobNotFoundError } from '@atproto/repo' import { defaultFetchHandler } from '@atproto/xrpc' import * as Post from '../src/lexicon/types/app/bsky/feed/post' -import { adminAuth, CloseFn, paginateAll, runTestServer } from './_util' +import { paginateAll } from './_util' import AppContext from '../src/context' import { TAKEDOWN } from '../src/lexicon/types/com/atproto/admin/defs' -import { BlobRef } from '@atproto/lexicon' import { ids } from '../src/lexicon/lexicons' const alice = { @@ -29,25 +30,24 @@ const bob = { } describe('crud operations', () => { + let network: TestNetworkNoAppView let ctx: AppContext let agent: AtpAgent let aliceAgent: AtpAgent let bobAgent: AtpAgent - let close: CloseFn beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'crud', }) - ctx = server.ctx - close = server.close - agent = new AtpAgent({ service: server.url }) - aliceAgent = new AtpAgent({ service: server.url }) - bobAgent = new AtpAgent({ service: server.url }) + ctx = network.pds.ctx + agent = network.pds.getClient() + aliceAgent = network.pds.getClient() + bobAgent = network.pds.getClient() }) afterAll(async () => { - await close() + await network.close() }) it('registers users', async () => { @@ -174,9 +174,7 @@ describe('crud operations', () => { }) it('attaches images to a post', async () => { - const file = await fs.readFile( - 
'tests/image/fixtures/key-landscape-small.jpg', - ) + const file = await fs.readFile('tests/sample-img/key-landscape-small.jpg') const uploadedRes = await aliceAgent.api.com.atproto.repo.uploadBlob(file, { encoding: 'image/jpeg', }) @@ -1170,7 +1168,7 @@ describe('crud operations', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) @@ -1193,7 +1191,7 @@ describe('crud operations', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) }) @@ -1215,7 +1213,7 @@ describe('crud operations', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) @@ -1233,7 +1231,7 @@ describe('crud operations', () => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: { authorization: network.pds.adminAuth() }, }, ) }) diff --git a/packages/pds/tests/db.test.ts b/packages/pds/tests/db.test.ts index 6e4192cfac8..1a2a42f0930 100644 --- a/packages/pds/tests/db.test.ts +++ b/packages/pds/tests/db.test.ts @@ -1,26 +1,23 @@ import { sql } from 'kysely' import { once } from 'events' +import { TestNetworkNoAppView } from '@atproto/dev-env' import { createDeferrable, wait } from '@atproto/common' import { Database } from '../src' import { Leader, appMigration } from '../src/db/leader' -import { runTestServer, CloseFn } from './_util' describe('db', () => { - let close: CloseFn + let network: TestNetworkNoAppView let db: Database beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'db', }) - close = server.close - db = server.ctx.db + db = network.pds.ctx.db }) afterAll(async () => { - if (close) { - await close() - } + await network.close() }) describe('transaction()', () => { diff --git 
a/packages/pds/tests/email-confirmation.test.ts b/packages/pds/tests/email-confirmation.test.ts index 48a0c375510..d6d4213986f 100644 --- a/packages/pds/tests/email-confirmation.test.ts +++ b/packages/pds/tests/email-confirmation.test.ts @@ -1,10 +1,9 @@ import { once, EventEmitter } from 'events' import Mail from 'nodemailer/lib/mailer' import AtpAgent from '@atproto/api' -import { SeedClient } from './seeds/client' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import userSeed from './seeds/users' import { ServerMailer } from '../src/mailer' -import { TestNetworkNoAppView } from '@atproto/dev-env' import { ComAtprotoServerConfirmEmail, ComAtprotoServerUpdateEmail, @@ -27,7 +26,7 @@ describe('email confirmation', () => { }) mailer = network.pds.ctx.mailer agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await userSeed(sc) alice = sc.accounts[sc.dids.alice] @@ -61,7 +60,7 @@ describe('email confirmation', () => { expect(session.data.emailConfirmed).toEqual(false) }) - it('disallows email update without token when unverified', async () => { + it('disallows email update when unverified', async () => { const res = await agent.api.com.atproto.server.requestEmailUpdate( undefined, { headers: sc.getHeaders(alice.did) }, @@ -75,22 +74,36 @@ describe('email confirmation', () => { { headers: sc.getHeaders(alice.did), encoding: 'application/json' }, ) await expect(attempt).rejects.toThrow() - - // await agent.api.com.atproto.server.updateEmail( - // { - // email: 'new-alice@example.com', - // }, - // { headers: sc.getHeaders(alice.did), encoding: 'application/json' }, - // ) - // const session = await agent.api.com.atproto.server.getSession( - // {}, - // { headers: sc.getHeaders(alice.did) }, - // ) - // expect(session.data.email).toEqual('new-alice@example.com') - // expect(session.data.emailConfirmed).toEqual(false) - // alice.email = session.data.email + const session = await 
agent.api.com.atproto.server.getSession( + {}, + { headers: sc.getHeaders(alice.did) }, + ) + expect(session.data.email).toEqual(alice.email) + expect(session.data.emailConfirmed).toEqual(false) }) + // it('allows email update without token when unverified', async () => { + // const res = await agent.api.com.atproto.server.requestEmailUpdate( + // undefined, + // { headers: sc.getHeaders(alice.did) }, + // ) + // expect(res.data.tokenRequired).toBe(false) + + // await agent.api.com.atproto.server.updateEmail( + // { + // email: 'new-alice@example.com', + // }, + // { headers: sc.getHeaders(alice.did), encoding: 'application/json' }, + // ) + // const session = await agent.api.com.atproto.server.getSession( + // {}, + // { headers: sc.getHeaders(alice.did) }, + // ) + // expect(session.data.email).toEqual('new-alice@example.com') + // expect(session.data.emailConfirmed).toEqual(false) + // alice.email = session.data.email + // }) + let confirmToken it('requests email confirmation', async () => { @@ -190,6 +203,19 @@ describe('email confirmation', () => { ) }) + it('fails email update with a badly formatted email', async () => { + const attempt = agent.api.com.atproto.server.updateEmail( + { + email: 'bad-email@disposeamail.com', + token: updateToken, + }, + { headers: sc.getHeaders(alice.did), encoding: 'application/json' }, + ) + await expect(attempt).rejects.toThrow( + 'This email address is not supported, please use a different email.', + ) + }) + it('updates email', async () => { await agent.api.com.atproto.server.updateEmail( { diff --git a/packages/pds/tests/file-uploads.test.ts b/packages/pds/tests/file-uploads.test.ts index cd26c7fa0a8..07b4c6ebb55 100644 --- a/packages/pds/tests/file-uploads.test.ts +++ b/packages/pds/tests/file-uploads.test.ts @@ -1,13 +1,13 @@ import fs from 'fs/promises' import { gzipSync } from 'zlib' import AtpAgent from '@atproto/api' -import { CloseFn, runTestServer, TestServerInfo } from './_util' -import { Database, ServerConfig } 
from '../src' +import { Database } from '../src' import DiskBlobStore from '../src/storage/disk-blobstore' import * as uint8arrays from 'uint8arrays' import { randomBytes } from '@atproto/crypto' import { BlobRef } from '@atproto/lexicon' import { ids } from '../src/lexicon/lexicons' +import { TestNetworkNoAppView } from '@atproto/dev-env' const alice = { email: 'alice@test.com', @@ -23,30 +23,24 @@ const bob = { } describe('file uploads', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let aliceAgent: AtpAgent let bobAgent: AtpAgent let blobstore: DiskBlobStore let db: Database - let cfg: ServerConfig - let serverUrl: string - let close: CloseFn beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'file_uploads', }) - blobstore = server.ctx.blobstore as DiskBlobStore - db = server.ctx.db - close = server.close - aliceAgent = new AtpAgent({ service: server.url }) - bobAgent = new AtpAgent({ service: server.url }) - cfg = server.ctx.cfg - serverUrl = server.url + blobstore = network.pds.ctx.blobstore as DiskBlobStore + db = network.pds.ctx.db + aliceAgent = network.pds.getClient() + bobAgent = network.pds.getClient() }) afterAll(async () => { - await close() + await network.close() }) it('registers users', async () => { @@ -69,30 +63,33 @@ describe('file uploads', () => { it('handles client abort', async () => { const abortController = new AbortController() - const _putTemp = server.ctx.blobstore.putTemp - server.ctx.blobstore.putTemp = function (...args) { + const _putTemp = network.pds.ctx.blobstore.putTemp + network.pds.ctx.blobstore.putTemp = function (...args) { // Abort just as processing blob in packages/pds/src/services/repo/blobs.ts process.nextTick(() => abortController.abort()) return _putTemp.call(this, ...args) } - const response = fetch(`${server.url}/xrpc/com.atproto.repo.uploadBlob`, { - method: 'post', - body: Buffer.alloc(5000000), // Enough bytes to get 
some chunking going on - signal: abortController.signal, - headers: { - 'content-type': 'image/jpeg', - authorization: `Bearer ${aliceAgent.session?.accessJwt}`, + const response = fetch( + `${network.pds.url}/xrpc/com.atproto.repo.uploadBlob`, + { + method: 'post', + body: Buffer.alloc(5000000), // Enough bytes to get some chunking going on + signal: abortController.signal, + headers: { + 'content-type': 'image/jpeg', + authorization: `Bearer ${aliceAgent.session?.accessJwt}`, + }, }, - }) + ) await expect(response).rejects.toThrow('operation was aborted') // Cleanup - server.ctx.blobstore.putTemp = _putTemp + network.pds.ctx.blobstore.putTemp = _putTemp // This test would fail from an uncaught exception: this grace period gives time for that to surface await new Promise((res) => setTimeout(res, 10)) }) it('uploads files', async () => { - smallFile = await fs.readFile('tests/image/fixtures/key-portrait-small.jpg') + smallFile = await fs.readFile('tests/sample-img/key-portrait-small.jpg') const res = await aliceAgent.api.com.atproto.repo.uploadBlob(smallFile, { encoding: 'image/jpeg', }) @@ -149,7 +146,7 @@ describe('file uploads', () => { let largeFile: Uint8Array it('does not allow referencing a file that is outside blob constraints', async () => { - largeFile = await fs.readFile('tests/image/fixtures/hd-key.jpg') + largeFile = await fs.readFile('tests/sample-img/hd-key.jpg') const res = await aliceAgent.api.com.atproto.repo.uploadBlob(largeFile, { encoding: 'image/jpeg', }) @@ -175,9 +172,7 @@ describe('file uploads', () => { }) it('permits duplicate uploads of the same file', async () => { - const file = await fs.readFile( - 'tests/image/fixtures/key-landscape-small.jpg', - ) + const file = await fs.readFile('tests/sample-img/key-landscape-small.jpg') const { data: uploadA } = await aliceAgent.api.com.atproto.repo.uploadBlob( file, { @@ -237,9 +232,7 @@ describe('file uploads', () => { }) it('corrects a bad mimetype', async () => { - const file = await 
fs.readFile( - 'tests/image/fixtures/key-landscape-large.jpg', - ) + const file = await fs.readFile('tests/sample-img/key-landscape-large.jpg') const res = await aliceAgent.api.com.atproto.repo.uploadBlob(file, { encoding: 'video/mp4', } as any) @@ -256,7 +249,7 @@ describe('file uploads', () => { }) it('handles pngs', async () => { - const file = await fs.readFile('tests/image/fixtures/at.png') + const file = await fs.readFile('tests/sample-img/at.png') const res = await aliceAgent.api.com.atproto.repo.uploadBlob(file, { encoding: 'image/png', }) diff --git a/packages/pds/tests/handles.test.ts b/packages/pds/tests/handles.test.ts index d6b6fd60caa..7c6833bdb78 100644 --- a/packages/pds/tests/handles.test.ts +++ b/packages/pds/tests/handles.test.ts @@ -1,8 +1,7 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { IdResolver } from '@atproto/identity' -import { SeedClient } from './seeds/client' import basicSeed from './seeds/basic' -import * as util from './_util' import { AppContext } from '../src' // outside of suite so they can be used in mock @@ -24,8 +23,8 @@ jest.mock('dns/promises', () => { }) describe('handles', () => { + let network: TestNetworkNoAppView let agent: AtpAgent - let close: util.CloseFn let sc: SeedClient let ctx: AppContext let idResolver: IdResolver @@ -33,21 +32,20 @@ describe('handles', () => { const newHandle = 'alice2.test' beforeAll(async () => { - const server = await util.runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'handles', }) - ctx = server.ctx - idResolver = new IdResolver({ plcUrl: ctx.cfg.didPlcUrl }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + ctx = network.pds.ctx + idResolver = new IdResolver({ plcUrl: ctx.cfg.identity.plcUrl }) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob }) afterAll(async 
() => { - await close() + await network.close() }) const getHandleFromDb = async (did: string): Promise => { @@ -213,7 +211,7 @@ describe('handles', () => { handle: 'bob-alt.test', }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -229,7 +227,7 @@ describe('handles', () => { handle: 'dril.test', }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -261,7 +259,7 @@ describe('handles', () => { handle: 'bob-alt.test', }, { - headers: { authorization: util.moderatorAuth() }, + headers: network.pds.adminAuthHeaders('moderator'), encoding: 'application/json', }, ) @@ -272,7 +270,7 @@ describe('handles', () => { handle: 'bob-alt.test', }, { - headers: { authorization: util.triageAuth() }, + headers: network.pds.adminAuthHeaders('triage'), encoding: 'application/json', }, ) diff --git a/packages/pds/tests/invite-codes.test.ts b/packages/pds/tests/invite-codes.test.ts index e5abe1cdab6..f406b77cc3b 100644 --- a/packages/pds/tests/invite-codes.test.ts +++ b/packages/pds/tests/invite-codes.test.ts @@ -1,32 +1,31 @@ import AtpAgent, { ComAtprotoServerCreateAccount } from '@atproto/api' import * as crypto from '@atproto/crypto' +import { TestNetworkNoAppView } from '@atproto/dev-env' import { AppContext } from '../src' -import * as util from './_util' import { DAY } from '@atproto/common' import { genInvCodes } from '../src/api/com/atproto/server/util' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' describe('account', () => { - let serverUrl: string + let network: TestNetworkNoAppView let ctx: AppContext let agent: AtpAgent - let close: util.CloseFn beforeAll(async () => { - const server = await util.runTestServer({ - inviteRequired: true, - userInviteInterval: DAY, - userInviteEpoch: Date.now() - 3 * DAY, + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 
'invite_codes', + pds: { + inviteRequired: true, + inviteInterval: DAY, + inviteEpoch: Date.now() - 3 * DAY, + }, }) - close = server.close - ctx = server.ctx - serverUrl = server.url - agent = new AtpAgent({ service: serverUrl }) + ctx = network.pds.ctx + agent = network.pds.getClient() }) afterAll(async () => { - await close() + await network.close() }) it('describes the fact that invites are required', async () => { @@ -35,7 +34,7 @@ describe('account', () => { }) it('succeeds with a valid code', async () => { - const code = await createInviteCode(agent, 1) + const code = await createInviteCode(network, agent, 1) await createAccountWithInvite(agent, code) }) @@ -47,9 +46,9 @@ describe('account', () => { }) it('fails on invite code from takendown account', async () => { - const account = await makeLoggedInAccount(agent) + const account = await makeLoggedInAccount(network, agent) // assign an invite code to the user - const code = await createInviteCode(agent, 1, account.did) + const code = await createInviteCode(network, agent, 1, account.did) // takedown the user's account const { data: takedownAction } = await agent.api.com.atproto.admin.takeModerationAction( @@ -64,7 +63,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) // attempt to create account with the previously generated invite code @@ -82,7 +81,7 @@ describe('account', () => { }, { encoding: 'application/json', - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), }, ) // attempt to create account with the previously generated invite code @@ -90,7 +89,7 @@ describe('account', () => { }) it('fails on used up invite code', async () => { - const code = await createInviteCode(agent, 2) + const code = await createInviteCode(network, agent, 2) await createAccountsWithInvite(agent, code, 2) const promise = createAccountWithInvite(agent, code) await 
expect(promise).rejects.toThrow( @@ -99,7 +98,7 @@ describe('account', () => { }) it('handles racing invite code uses', async () => { - const inviteCode = await createInviteCode(agent, 1) + const inviteCode = await createInviteCode(network, agent, 1) const COUNT = 10 let successes = 0 @@ -122,7 +121,7 @@ describe('account', () => { }) it('allow users to get available user invites', async () => { - const account = await makeLoggedInAccount(agent) + const account = await makeLoggedInAccount(network, agent) // no codes available yet const res1 = @@ -151,7 +150,7 @@ describe('account', () => { }) it('admin gifted codes to not impact a users available codes', async () => { - const account = await makeLoggedInAccount(agent) + const account = await makeLoggedInAccount(network, agent) // again, pretend account was made 2 days ago const twoDaysAgo = new Date(Date.now() - 2 * DAY).toISOString() @@ -161,9 +160,9 @@ describe('account', () => { .where('did', '=', account.did) .execute() - await createInviteCode(agent, 1, account.did) - await createInviteCode(agent, 1, account.did) - await createInviteCode(agent, 1, account.did) + await createInviteCode(network, agent, 1, account.did) + await createInviteCode(network, agent, 1, account.did) + await createInviteCode(network, agent, 1, account.did) const res = await account.agent.api.com.atproto.server.getAccountInviteCodes() @@ -181,7 +180,7 @@ describe('account', () => { }) it('creates invites based on epoch', async () => { - const account = await makeLoggedInAccount(agent) + const account = await makeLoggedInAccount(network, agent) // first, pretend account was made 2 days ago & get those two codes const twoDaysAgo = new Date(Date.now() - 2 * DAY).toISOString() @@ -249,9 +248,9 @@ describe('account', () => { }) it('prevents use of disabled codes', async () => { - const first = await createInviteCode(agent, 1) - const account = await makeLoggedInAccount(agent) - const second = await createInviteCode(agent, 1, account.did) + 
const first = await createInviteCode(network, agent, 1) + const account = await makeLoggedInAccount(network, agent) + const second = await createInviteCode(network, agent, 1, account.did) // disabled first by code & second by did await agent.api.com.atproto.admin.disableInviteCodes( @@ -260,7 +259,7 @@ describe('account', () => { accounts: [account.did], }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -279,7 +278,7 @@ describe('account', () => { accounts: ['admin'], }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -295,7 +294,7 @@ describe('account', () => { forAccounts: accounts, }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -321,6 +320,7 @@ describe('account', () => { }) const createInviteCode = async ( + network: TestNetworkNoAppView, agent: AtpAgent, uses: number, forAccount?: string, @@ -328,7 +328,7 @@ const createInviteCode = async ( const res = await agent.api.com.atproto.server.createInviteCode( { useCount: uses, forAccount }, { - headers: { authorization: util.adminAuth() }, + headers: network.pds.adminAuthHeaders(), encoding: 'application/json', }, ) @@ -360,9 +360,10 @@ const createAccountsWithInvite = async ( } const makeLoggedInAccount = async ( + network: TestNetworkNoAppView, agent: AtpAgent, ): Promise<{ did: string; agent: AtpAgent }> => { - const code = await createInviteCode(agent, 1) + const code = await createInviteCode(network, agent, 1) const account = await createAccountWithInvite(agent, code) const did = account.did const loggedInAgent = new AtpAgent({ service: agent.service.toString() }) diff --git a/packages/pds/tests/preferences.test.ts b/packages/pds/tests/preferences.test.ts index 0194e967575..77dc256f85f 100644 --- a/packages/pds/tests/preferences.test.ts +++ 
b/packages/pds/tests/preferences.test.ts @@ -1,26 +1,23 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' -import { CloseFn, runTestServer, TestServerInfo } from './_util' -import { SeedClient } from './seeds/client' import usersSeed from './seeds/users' describe('user preferences', () => { - let server: TestServerInfo - let close: CloseFn + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'preferences', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await usersSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) it('requires auth to set or put preferences.', async () => { @@ -45,7 +42,7 @@ describe('user preferences', () => { }) it('only gets preferences in app.bsky namespace.', async () => { - const { db, services } = server.ctx + const { db, services } = network.pds.ctx await db.transaction(async (tx) => { await services .account(tx) @@ -101,7 +98,7 @@ describe('user preferences', () => { ], }) // Ensure other prefs were not clobbered - const { db, services } = server.ctx + const { db, services } = network.pds.ctx const otherPrefs = await services .account(db) .getPreferences(sc.dids.alice, 'com.atproto') diff --git a/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap b/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap index 400f405fed3..d928dc99923 100644 --- a/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap +++ b/packages/pds/tests/proxied/__snapshots__/feedgen.test.ts.snap @@ -96,7 +96,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", 
"fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(6)@jpeg", }, @@ -129,7 +129,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -369,7 +369,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(6)@jpeg", }, @@ -402,7 +402,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -605,7 +605,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -616,7 +616,7 @@ Object { }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -813,12 +813,12 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(6)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": 
"https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(11)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(11)@jpeg", }, @@ -886,7 +886,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -897,7 +897,7 @@ Object { }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", diff --git a/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap b/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap index 994589ff0b2..67c7286359d 100644 --- a/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap +++ b/packages/pds/tests/proxied/__snapshots__/views.test.ts.snap @@ -352,7 +352,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(1)/cids(3)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(1)/cids(3)@jpeg", }, @@ -385,7 +385,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -870,7 +870,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(6)@jpeg", }, @@ -903,7 +903,7 @@ Object { "$type": "app.bsky.embed.images", "images": 
Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1015,7 +1015,7 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(6)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(6)@jpeg", }, @@ -1048,7 +1048,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1251,7 +1251,7 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1262,7 +1262,7 @@ Object { }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1627,12 +1627,12 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(4)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(4)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(3)/cids(5)@jpeg", "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(3)/cids(5)@jpeg", }, @@ -1700,7 +1700,7 @@ Object { "$type": "app.bsky.embed.images", "images": 
Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1711,7 +1711,7 @@ Object { }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", @@ -1770,11 +1770,28 @@ Object { Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, @@ -1783,6 +1800,228 @@ Object { "cid": "cids(0)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], + "likeCount": 0, + "record": Object { + "$type": "app.bsky.feed.post", + "createdAt": "1970-01-01T00:00:00.000Z", + "reply": Object { + "parent": Object { + "cid": "cids(4)", + "uri": "record(3)", + }, + "root": Object { + "cid": "cids(3)", + "uri": "record(2)", + }, + }, + "text": "thanks bob", + }, + "replyCount": 0, + "repostCount": 1, + "uri": "record(0)", + "viewer": Object {}, + }, + "reason": Object { + "$type": "app.bsky.feed.defs#reasonRepost", + "by": Object { + "did": "user(2)", + "handle": "dan.test", + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], + "viewer": Object { + "blockedBy": false, + "following": "record(4)", + "muted": false, + }, + }, + 
"indexedAt": "1970-01-01T00:00:00.000Z", + }, + "reply": Object { + "parent": Object { + "$type": "app.bsky.feed.defs#postView", + "author": Object { + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", + "displayName": "bobby", + "handle": "bob.test", + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], + "viewer": Object { + "blockedBy": false, + "followedBy": "record(6)", + "following": "record(5)", + "muted": false, + }, + }, + "cid": "cids(4)", + "embed": Object { + "$type": "app.bsky.embed.images#view", + "images": Array [ + Object { + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(6)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(6)@jpeg", + }, + ], + }, + "indexedAt": "1970-01-01T00:00:00.000Z", + "labels": Array [ + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label", + }, + Object { + "cid": "cids(4)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "record(3)", + "val": "test-label-2", + }, + ], + "likeCount": 0, + "record": Object { + "$type": "app.bsky.feed.post", + "createdAt": "1970-01-01T00:00:00.000Z", + "embed": Object { + "$type": "app.bsky.embed.images", + "images": Array [ + Object { + "alt": "tests/sample-img/key-landscape-small.jpg", + "image": Object { + "$type": "blob", + "mimeType": "image/jpeg", + "ref": Object { + "$link": "cids(6)", + }, + "size": 4114, + }, + }, + ], + }, + "reply": Object { + "parent": Object { + "cid": "cids(3)", + "uri": "record(2)", 
+ }, + "root": Object { + "cid": "cids(3)", + "uri": "record(2)", + }, + }, + "text": "hear that label_me label_me_2", + }, + "replyCount": 1, + "repostCount": 0, + "uri": "record(3)", + "viewer": Object {}, + }, + "root": Object { + "$type": "app.bsky.feed.defs#postView", + "author": Object { + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", + "did": "user(0)", + "displayName": "ali", + "handle": "alice.test", + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], + "viewer": Object { + "blockedBy": false, + "muted": false, + }, + }, + "cid": "cids(3)", + "indexedAt": "1970-01-01T00:00:00.000Z", + "labels": Array [], + "likeCount": 3, + "record": Object { + "$type": "app.bsky.feed.post", + "createdAt": "1970-01-01T00:00:00.000000Z", + "text": "again", + }, + "replyCount": 2, + "repostCount": 1, + "uri": "record(2)", + "viewer": Object {}, + }, + }, + }, + Object { + "post": Object { + "author": Object { + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", + "did": "user(0)", + "displayName": "ali", + "handle": "alice.test", + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], + "viewer": Object { + "blockedBy": false, + "muted": false, + }, + }, + "cid": "cids(3)", + "indexedAt": "1970-01-01T00:00:00.000Z", + "labels": Array [], "likeCount": 3, "record": Object { "$type": "app.bsky.feed.post", @@ -1791,7 +2030,7 @@ Object { }, "replyCount": 2, "repostCount": 
1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, "reason": Object { @@ -1799,10 +2038,18 @@ Object { "by": Object { "did": "user(2)", "handle": "dan.test", - "labels": Array [], + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], "viewer": Object { "blockedBy": false, - "following": "record(1)", + "following": "record(4)", "muted": false, }, }, @@ -1814,30 +2061,38 @@ Object { "author": Object { "did": "user(2)", "handle": "dan.test", - "labels": Array [], + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], "viewer": Object { "blockedBy": false, - "following": "record(1)", + "following": "record(4)", "muted": false, }, }, - "cid": "cids(2)", + "cid": "cids(7)", "embed": Object { "$type": "app.bsky.embed.record#view", "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, - "cid": "cids(3)", + "cid": "cids(8)", "embeds": Array [ Object { "$type": "app.bsky.embed.recordWithMedia#view", @@ -1845,14 +2100,14 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(4).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(4).jpeg", + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(6)@jpeg", + "thumb": 
"https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(6)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5).jpeg", + "alt": "tests/sample-img/key-alt.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(9)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(9)@jpeg", }, ], }, @@ -1860,22 +2115,39 @@ Object { "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(6)", + "cid": "cids(10)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(6)", + "uri": "record(12)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -1891,7 +2163,7 @@ Object { ], "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(3)", + "uri": "record(9)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -1901,23 +2173,23 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": 
"tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(5)", + "$link": "cids(9)", }, "size": 12736, }, @@ -1926,8 +2198,8 @@ Object { }, "record": Object { "record": Object { - "cid": "cids(6)", - "uri": "record(6)", + "cid": "cids(10)", + "uri": "record(12)", }, }, }, @@ -1944,8 +2216,8 @@ Object { "embed": Object { "$type": "app.bsky.embed.record", "record": Object { - "cid": "cids(3)", - "uri": "record(3)", + "cid": "cids(8)", + "uri": "record(9)", }, }, "facets": Array [ @@ -1966,19 +2238,19 @@ Object { }, "replyCount": 0, "repostCount": 1, - "uri": "record(2)", + "uri": "record(8)", "viewer": Object {}, }, "reason": Object { "$type": "app.bsky.feed.defs#reasonRepost", "by": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, @@ -1988,17 +2260,34 @@ Object { Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], 
"viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(7)", + "cid": "cids(0)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 0, @@ -2007,64 +2296,81 @@ Object { "createdAt": "1970-01-01T00:00:00.000Z", "reply": Object { "parent": Object { - "cid": "cids(8)", - "uri": "record(10)", + "cid": "cids(4)", + "uri": "record(3)", }, "root": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, }, "text": "thanks bob", }, "replyCount": 0, - "repostCount": 0, - "uri": "record(9)", + "repostCount": 1, + "uri": "record(0)", "viewer": Object {}, }, "reply": Object { "parent": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(8)", + "cid": "cids(4)", "embed": Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(4).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(4).jpeg", + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(6)@jpeg", + 
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(6)@jpeg", }, ], }, "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [ Object { - "cid": "cids(8)", + "cid": "cids(4)", "cts": "1970-01-01T00:00:00.000Z", "neg": false, "src": "did:example:labeler", - "uri": "record(10)", + "uri": "record(3)", "val": "test-label", }, Object { - "cid": "cids(8)", + "cid": "cids(4)", "cts": "1970-01-01T00:00:00.000Z", "neg": false, "src": "did:example:labeler", - "uri": "record(10)", + "uri": "record(3)", "val": "test-label-2", }, ], @@ -2076,12 +2382,12 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, @@ -2090,35 +2396,52 @@ Object { }, "reply": Object { "parent": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, "root": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, }, "text": "hear that label_me label_me_2", }, "replyCount": 1, "repostCount": 0, - "uri": "record(10)", + "uri": "record(3)", "viewer": Object {}, }, "root": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": 
false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 3, @@ -2129,7 +2452,7 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, }, @@ -2137,17 +2460,17 @@ Object { Object { "post": Object { "author": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, - "cid": "cids(9)", + "cid": "cids(11)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 0, @@ -2156,36 +2479,53 @@ Object { "createdAt": "1970-01-01T00:00:00.000Z", "reply": Object { "parent": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, "root": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, }, "text": "of course", }, "replyCount": 0, "repostCount": 0, - "uri": "record(11)", + "uri": "record(13)", "viewer": Object {}, }, "reply": Object { "parent": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", 
"labels": Array [], "likeCount": 3, @@ -2196,23 +2536,40 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, "root": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 3, @@ -2223,7 +2580,7 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, }, @@ -2231,45 +2588,62 @@ Object { Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - 
"cid": "cids(8)", + "cid": "cids(4)", "embed": Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(5)/cids(4).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(5)/cids(4).jpeg", + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(4)/cids(6)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(4)/cids(6)@jpeg", }, ], }, "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [ Object { - "cid": "cids(8)", + "cid": "cids(4)", "cts": "1970-01-01T00:00:00.000Z", "neg": false, "src": "did:example:labeler", - "uri": "record(10)", + "uri": "record(3)", "val": "test-label", }, Object { - "cid": "cids(8)", + "cid": "cids(4)", "cts": "1970-01-01T00:00:00.000Z", "neg": false, "src": "did:example:labeler", - "uri": "record(10)", + "uri": "record(3)", "val": "test-label-2", }, ], @@ -2281,12 +2655,12 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, @@ -2295,36 +2669,53 @@ Object { }, "reply": Object { "parent": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, "root": Object { - "cid": "cids(0)", - "uri": "record(0)", + "cid": "cids(3)", + "uri": "record(2)", }, }, "text": "hear that label_me label_me_2", }, "replyCount": 1, "repostCount": 0, - "uri": "record(10)", + "uri": "record(3)", "viewer": Object {}, }, "reply": Object { "parent": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": 
"https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 3, @@ -2335,23 +2726,40 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, "root": Object { "$type": "app.bsky.feed.defs#postView", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 3, @@ -2362,7 +2770,7 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, }, @@ -2370,17 +2778,34 @@ Object { Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": 
"https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(10)", + "cid": "cids(12)", "embed": Object { "$type": "app.bsky.embed.record#view", "record": Object { @@ -2388,34 +2813,42 @@ Object { "author": Object { "did": "user(2)", "handle": "dan.test", - "labels": Array [], + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], "viewer": Object { "blockedBy": false, - "following": "record(1)", + "following": "record(4)", "muted": false, }, }, - "cid": "cids(2)", + "cid": "cids(7)", "embeds": Array [ Object { "$type": "app.bsky.embed.record#view", "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, - "cid": "cids(3)", + "cid": "cids(8)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(3)", + "uri": "record(9)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -2425,23 +2858,23 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", 
"mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(5)", + "$link": "cids(9)", }, "size": 12736, }, @@ -2450,8 +2883,8 @@ Object { }, "record": Object { "record": Object { - "cid": "cids(6)", - "uri": "record(6)", + "cid": "cids(10)", + "uri": "record(12)", }, }, }, @@ -2462,15 +2895,15 @@ Object { ], "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(2)", + "uri": "record(8)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", "embed": Object { "$type": "app.bsky.embed.record", "record": Object { - "cid": "cids(3)", - "uri": "record(3)", + "cid": "cids(8)", + "uri": "record(9)", }, }, "facets": Array [ @@ -2494,11 +2927,11 @@ Object { "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [ Object { - "cid": "cids(10)", + "cid": "cids(12)", "cts": "1970-01-01T00:00:00.000Z", "neg": false, "src": "did:example:labeler", - "uri": "record(12)", + "uri": "record(14)", "val": "test-label", }, ], @@ -2509,34 +2942,51 @@ Object { "embed": Object { "$type": "app.bsky.embed.record", "record": Object { - "cid": "cids(2)", - "uri": "record(2)", + "cid": "cids(7)", + "uri": "record(8)", }, }, "text": "yoohoo label_me", }, "replyCount": 0, "repostCount": 0, - "uri": "record(12)", + "uri": "record(14)", "viewer": Object {}, }, }, Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": 
"record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(11)", + "cid": "cids(13)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 0, @@ -2547,24 +2997,41 @@ Object { }, "replyCount": 0, "repostCount": 0, - "uri": "record(13)", + "uri": "record(15)", "viewer": Object {}, }, }, Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(0)", + "cid": "cids(3)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 3, @@ -2575,7 +3042,7 @@ Object { }, "replyCount": 2, "repostCount": 1, - "uri": "record(0)", + "uri": "record(2)", "viewer": Object {}, }, }, @@ -2584,30 +3051,38 @@ Object { "author": Object { "did": "user(2)", "handle": "dan.test", - "labels": Array [], + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], "viewer": Object { "blockedBy": false, - "following": "record(1)", + "following": "record(4)", "muted": false, }, }, - "cid": 
"cids(2)", + "cid": "cids(7)", "embed": Object { "$type": "app.bsky.embed.record#view", "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, - "cid": "cids(3)", + "cid": "cids(8)", "embeds": Array [ Object { "$type": "app.bsky.embed.recordWithMedia#view", @@ -2615,14 +3090,14 @@ Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(4).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(4).jpeg", + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(6)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(6)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5).jpeg", + "alt": "tests/sample-img/key-alt.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(9)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(9)@jpeg", }, ], }, @@ -2630,22 +3105,39 @@ Object { "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": 
"1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(6)", + "cid": "cids(10)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(6)", + "uri": "record(12)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -2661,7 +3153,7 @@ Object { ], "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(3)", + "uri": "record(9)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -2671,23 +3163,23 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(5)", + "$link": "cids(9)", }, "size": 12736, }, @@ -2696,8 +3188,8 @@ Object { }, "record": Object { "record": Object { - "cid": "cids(6)", - "uri": "record(6)", + "cid": "cids(10)", + "uri": "record(12)", }, }, }, @@ -2714,8 +3206,8 @@ Object { "embed": Object { "$type": "app.bsky.embed.record", "record": Object { - "cid": "cids(3)", - "uri": "record(3)", + "cid": "cids(8)", + "uri": "record(9)", }, }, "facets": Array [ @@ -2736,7 +3228,7 @@ Object { }, "replyCount": 0, "repostCount": 1, - "uri": "record(2)", + "uri": "record(8)", 
"viewer": Object {}, }, }, @@ -2745,14 +3237,22 @@ Object { "author": Object { "did": "user(2)", "handle": "dan.test", - "labels": Array [], + "labels": Array [ + Object { + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "did:example:labeler", + "uri": "user(2)", + "val": "repo-action-label", + }, + ], "viewer": Object { "blockedBy": false, - "following": "record(1)", + "following": "record(4)", "muted": false, }, }, - "cid": "cids(12)", + "cid": "cids(14)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 0, @@ -2763,38 +3263,38 @@ Object { }, "replyCount": 0, "repostCount": 0, - "uri": "record(14)", + "uri": "record(16)", "viewer": Object {}, }, }, Object { "post": Object { "author": Object { - "did": "user(3)", + "did": "user(5)", "handle": "carol.test", "labels": Array [], "viewer": Object { "blockedBy": false, - "followedBy": "record(5)", - "following": "record(4)", + "followedBy": "record(11)", + "following": "record(10)", "muted": false, }, }, - "cid": "cids(3)", + "cid": "cids(8)", "embed": Object { "$type": "app.bsky.embed.recordWithMedia#view", "media": Object { "$type": "app.bsky.embed.images#view", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(4).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(4).jpeg", + "alt": "tests/sample-img/key-landscape-small.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(6)@jpeg", + "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(6)@jpeg", }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", - "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(5).jpeg", - "thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(5).jpeg", + "alt": "tests/sample-img/key-alt.jpg", + "fullsize": "https://bsky.public.url/img/feed_fullsize/plain/user(6)/cids(9)@jpeg", + 
"thumb": "https://bsky.public.url/img/feed_thumbnail/plain/user(6)/cids(9)@jpeg", }, ], }, @@ -2802,23 +3302,40 @@ Object { "record": Object { "$type": "app.bsky.embed.record#viewRecord", "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(6)", + "cid": "cids(10)", "embeds": Array [], "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], - "uri": "record(6)", + "uri": "record(12)", "value": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", @@ -2843,23 +3360,23 @@ Object { "$type": "app.bsky.embed.images", "images": Array [ Object { - "alt": "tests/image/fixtures/key-landscape-small.jpg", + "alt": "tests/sample-img/key-landscape-small.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(4)", + "$link": "cids(6)", }, "size": 4114, }, }, Object { - "alt": "tests/image/fixtures/key-alt.jpg", + "alt": "tests/sample-img/key-alt.jpg", "image": Object { "$type": "blob", "mimeType": "image/jpeg", "ref": Object { - "$link": "cids(5)", + "$link": "cids(9)", }, "size": 12736, }, @@ -2868,8 +3385,8 @@ Object { }, "record": Object { "record": Object { - "cid": "cids(6)", - "uri": "record(6)", + "cid": "cids(10)", + "uri": "record(12)", }, }, }, @@ 
-2877,28 +3394,45 @@ Object { }, "replyCount": 0, "repostCount": 0, - "uri": "record(3)", + "uri": "record(9)", "viewer": Object { - "like": "record(15)", + "like": "record(17)", }, }, }, Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(5)/cids(1).jpeg", - "did": "user(4)", + "avatar": "https://bsky.public.url/img/avatar/plain/user(4)/cids(1)@jpeg", + "did": "user(3)", "displayName": "bobby", "handle": "bob.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-a", + }, + Object { + "cid": "cids(5)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(3)", + "uri": "record(7)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, - "followedBy": "record(8)", - "following": "record(7)", + "followedBy": "record(6)", + "following": "record(5)", "muted": false, }, }, - "cid": "cids(6)", + "cid": "cids(10)", "indexedAt": "1970-01-01T00:00:00.000Z", "labels": Array [], "likeCount": 0, @@ -2913,35 +3447,69 @@ Object { }, "replyCount": 0, "repostCount": 0, - "uri": "record(6)", + "uri": "record(12)", "viewer": Object {}, }, }, Object { "post": Object { "author": Object { - "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1).jpeg", + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", "did": "user(0)", "displayName": "ali", "handle": "alice.test", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], "viewer": Object { "blockedBy": false, "muted": false, }, }, - "cid": "cids(13)", + "cid": "cids(15)", 
"indexedAt": "1970-01-01T00:00:00.000Z", - "labels": Array [], + "labels": Array [ + Object { + "cid": "cids(15)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(18)", + "val": "self-label", + }, + ], "likeCount": 0, "record": Object { "$type": "app.bsky.feed.post", "createdAt": "1970-01-01T00:00:00.000Z", + "labels": Object { + "$type": "com.atproto.label.defs#selfLabels", + "values": Array [ + Object { + "val": "self-label", + }, + ], + }, "text": "hey there", }, "replyCount": 0, "repostCount": 0, - "uri": "record(16)", + "uri": "record(18)", "viewer": Object {}, }, }, @@ -3406,3 +3974,51 @@ Object { ], } `; + +exports[`proxies view requests unspecced.getPopularFeedGenerators 1`] = ` +Object { + "cursor": "0000000000000::bafycid", + "feeds": Array [ + Object { + "cid": "cids(0)", + "creator": Object { + "avatar": "https://bsky.public.url/img/avatar/plain/user(1)/cids(1)@jpeg", + "description": "its me!", + "did": "user(0)", + "displayName": "ali", + "handle": "alice.test", + "indexedAt": "1970-01-01T00:00:00.000Z", + "labels": Array [ + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-a", + }, + Object { + "cid": "cids(2)", + "cts": "1970-01-01T00:00:00.000Z", + "neg": false, + "src": "user(0)", + "uri": "record(1)", + "val": "self-label-b", + }, + ], + "viewer": Object { + "blockedBy": false, + "muted": false, + }, + }, + "description": "Provides all feed candidates", + "did": "did:example:feedgen", + "displayName": "All", + "indexedAt": "1970-01-01T00:00:00.000Z", + "likeCount": 0, + "uri": "record(0)", + "viewer": Object {}, + }, + ], +} +`; diff --git a/packages/pds/tests/proxied/admin.test.ts b/packages/pds/tests/proxied/admin.test.ts index 98ec8f1f70c..23c801cd6b2 100644 --- a/packages/pds/tests/proxied/admin.test.ts +++ b/packages/pds/tests/proxied/admin.test.ts @@ -1,6 +1,5 @@ import AtpAgent from '@atproto/api' 
-import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { REASONOTHER, @@ -29,7 +28,7 @@ describe('proxies admin requests', () => { }, }) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() const { data: invite } = await agent.api.com.atproto.server.createInviteCode( { useCount: 10 }, @@ -245,7 +244,6 @@ describe('proxies admin requests', () => { }) it('takesdown and labels repos, and reverts.', async () => { - const { db, services } = network.bsky.ctx // takedown repo const { data: action } = await agent.api.com.atproto.admin.takeModerationAction( @@ -269,16 +267,12 @@ describe('proxies admin requests', () => { const tryGetProfileAppview = agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, { - headers: { ...sc.getHeaders(sc.dids.carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.carol) }, }, ) await expect(tryGetProfileAppview).rejects.toThrow( 'Account has been taken down', ) - const labelsA = await services - .label(db.getPrimary()) - .getLabels(sc.dids.alice, { includeNeg: false, skipCache: true }) - expect(labelsA.map((l) => l.val)).toEqual(['dogs']) // reverse action await agent.api.com.atproto.admin.reverseModerationAction( { id: action.id, createdBy: 'did:example:admin', reason: 'X' }, @@ -291,20 +285,15 @@ describe('proxies admin requests', () => { const { data: profileAppview } = await agent.api.app.bsky.actor.getProfile( { actor: sc.dids.alice }, { - headers: { ...sc.getHeaders(sc.dids.carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.carol) }, }, ) expect(profileAppview).toEqual( expect.objectContaining({ did: sc.dids.alice, handle: 'alice.test' }), ) - const labelsB = await services - .label(db.getPrimary()) - .getLabels(sc.dids.alice, { includeNeg: false, skipCache: true }) - expect(labelsB.map((l) => 
l.val)).toEqual(['cats']) }) it('takesdown and labels records, and reverts.', async () => { - const { db, services } = network.bsky.ctx const post = sc.posts[sc.dids.alice][0] // takedown post const { data: action } = @@ -327,17 +316,11 @@ describe('proxies admin requests', () => { }, ) // check thread and labels - const tryGetPostAppview = agent.api.app.bsky.feed.getPostThread( + const tryGetPost = agent.api.app.bsky.feed.getPostThread( { uri: post.ref.uriStr, depth: 0 }, - { - headers: { ...sc.getHeaders(sc.dids.carol), 'x-appview-proxy': 'true' }, - }, + { headers: sc.getHeaders(sc.dids.carol) }, ) - await expect(tryGetPostAppview).rejects.toThrow(NotFoundError) - const labelsA = await services - .label(db.getPrimary()) - .getLabels(post.ref.uriStr, { includeNeg: false, skipCache: true }) - expect(labelsA.map((l) => l.val)).toEqual(['dogs']) + await expect(tryGetPost).rejects.toThrow(NotFoundError) // reverse action await agent.api.com.atproto.admin.reverseModerationAction( { id: action.id, createdBy: 'did:example:admin', reason: 'X' }, @@ -350,16 +333,12 @@ describe('proxies admin requests', () => { const { data: threadAppview } = await agent.api.app.bsky.feed.getPostThread( { uri: post.ref.uriStr, depth: 0 }, { - headers: { ...sc.getHeaders(sc.dids.carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.carol) }, }, ) expect(threadAppview.thread.post).toEqual( expect.objectContaining({ uri: post.ref.uriStr, cid: post.ref.cidStr }), ) - const labelsB = await services - .label(db.getPrimary()) - .getLabels(post.ref.uriStr, { includeNeg: false, skipCache: true }) - expect(labelsB.map((l) => l.val)).toEqual(['cats']) }) it('does not persist actions and reports on pds.', async () => { diff --git a/packages/pds/tests/proxied/feedgen.test.ts b/packages/pds/tests/proxied/feedgen.test.ts index 44a89e0b34c..142d1235497 100644 --- a/packages/pds/tests/proxied/feedgen.test.ts +++ b/packages/pds/tests/proxied/feedgen.test.ts @@ -1,7 +1,6 @@ import { 
makeAlgos } from '@atproto/bsky' import AtpAgent, { AtUri, FeedNS } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { forSnapshot } from '../_util' @@ -23,7 +22,7 @@ describe('feedgen proxy view', () => { bsky: { algos: makeAlgos(feedUri.host) }, }) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await basicSeed(sc) // publish feed const feed = await agent.api.app.bsky.feed.generator.create( @@ -69,7 +68,7 @@ describe('feedgen proxy view', () => { const { data: feed } = await agent.api.app.bsky.feed.getFeed( { feed: feedUri.toString() }, { - headers: { ...sc.getHeaders(sc.dids.alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.alice) }, }, ) expect(forSnapshot(feed)).toMatchSnapshot() diff --git a/packages/pds/tests/proxied/notif.test.ts b/packages/pds/tests/proxied/notif.test.ts index 106620d4bcd..fb3de2b8fe7 100644 --- a/packages/pds/tests/proxied/notif.test.ts +++ b/packages/pds/tests/proxied/notif.test.ts @@ -3,9 +3,8 @@ import http from 'http' import { AddressInfo } from 'net' import express from 'express' import AtpAgent from '@atproto/api' -import { TestNetworkNoAppView } from '@atproto/dev-env' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import { verifyJwt } from '@atproto/xrpc-server' -import { SeedClient } from '../seeds/client' import usersSeed from '../seeds/users' import { createServer } from '../../src/lexicon' @@ -24,7 +23,7 @@ describe('notif service proxy', () => { network.pds.server.app.get const plc = network.plc.getClient() agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await usersSeed(sc) await network.processAll() // piggybacking existing plc did, turn it into a notif service diff --git a/packages/pds/tests/proxied/procedures.test.ts 
b/packages/pds/tests/proxied/procedures.test.ts index 4df2b92356a..00dd02863ce 100644 --- a/packages/pds/tests/proxied/procedures.test.ts +++ b/packages/pds/tests/proxied/procedures.test.ts @@ -1,6 +1,5 @@ import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' describe('proxies appview procedures', () => { @@ -17,10 +16,9 @@ describe('proxies appview procedures', () => { dbPostgresSchema: 'proxy_procedures', }) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await basicSeed(sc) await network.processAll() - agent.api.setHeader('x-appview-proxy', 'true') alice = sc.dids.alice bob = sc.dids.bob carol = sc.dids.carol @@ -93,6 +91,8 @@ describe('proxies appview procedures', () => { }, sc.getHeaders(carol), ) + await network.processAll() + // mute lists await agent.api.app.bsky.graph.muteActorList( { list: bobList.uri }, diff --git a/packages/pds/tests/proxied/read-after-write.test.ts b/packages/pds/tests/proxied/read-after-write.test.ts index bc0a7d49681..34f1e4b71dd 100644 --- a/packages/pds/tests/proxied/read-after-write.test.ts +++ b/packages/pds/tests/proxied/read-after-write.test.ts @@ -1,7 +1,6 @@ import util from 'util' import AtpAgent from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { RecordRef, SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient, RecordRef } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { ThreadViewPost } from '../../src/lexicon/types/app/bsky/feed/defs' import { View as RecordEmbedView } from '../../src/lexicon/types/app/bsky/embed/record' @@ -19,7 +18,7 @@ describe('proxy read after write', () => { dbPostgresSchema: 'proxy_read_after_write', }) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await basicSeed(sc) await 
network.processAll() alice = sc.dids.alice @@ -35,7 +34,7 @@ describe('proxy read after write', () => { await sc.updateProfile(alice, { displayName: 'blah' }) const res = await agent.api.app.bsky.actor.getProfile( { actor: alice }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) expect(res.data.displayName).toEqual('blah') expect(res.data.description).toBeUndefined() @@ -44,18 +43,18 @@ describe('proxy read after write', () => { it('handles image formatting', async () => { const blob = await sc.uploadFile( alice, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) await sc.updateProfile(alice, { displayName: 'blah', avatar: blob.image }) const res = await agent.api.app.bsky.actor.getProfile( { actor: alice }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) expect(res.data.avatar).toEqual( util.format( - network.pds.ctx.cfg.bskyAppViewCdnUrlPattern, + network.pds.ctx.cfg.bskyAppView.cdnUrlPattern, 'avatar', alice, blob.image.ref.toString(), @@ -66,7 +65,7 @@ describe('proxy read after write', () => { it('handles read after write on getAuthorFeed', async () => { const res = await agent.api.app.bsky.feed.getAuthorFeed( { actor: alice }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) for (const item of res.data.feed) { if (item.post.author.did === alice) { @@ -95,7 +94,7 @@ describe('proxy read after write', () => { replyRef2 = reply2.ref const res = await agent.api.app.bsky.feed.getPostThread( { uri: sc.posts[alice][0].ref.uriStr }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) const layerOne = res.data.thread.replies as ThreadViewPost[] expect(layerOne.length).toBe(1) @@ -108,7 +107,7 @@ describe('proxy read after write', () => { 
it('handles read after write on a thread that is not found on appview', async () => { const res = await agent.api.app.bsky.feed.getPostThread( { uri: replyRef1.uriStr }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) const thread = res.data.thread as ThreadViewPost expect(thread.post.uri).toEqual(replyRef1.uriStr) @@ -140,7 +139,7 @@ describe('proxy read after write', () => { ) const res = await agent.api.app.bsky.feed.getPostThread( { uri: sc.posts[carol][0].ref.uriStr }, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) const replies = res.data.thread.replies as ThreadViewPost[] expect(replies.length).toBe(1) @@ -160,7 +159,7 @@ describe('proxy read after write', () => { ) const res = await agent.api.app.bsky.feed.getTimeline( {}, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) expect(res.data.feed[0].post.uri).toEqual(postRes.uri) }) @@ -168,7 +167,7 @@ describe('proxy read after write', () => { it('returns lag headers', async () => { const res = await agent.api.app.bsky.feed.getTimeline( {}, - { headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' } }, + { headers: { ...sc.getHeaders(alice) } }, ) const lag = res.headers['atproto-upstream-lag'] expect(lag).toBeDefined() diff --git a/packages/pds/tests/proxied/views.test.ts b/packages/pds/tests/proxied/views.test.ts index 7b81ee8bec3..13fa41174b4 100644 --- a/packages/pds/tests/proxied/views.test.ts +++ b/packages/pds/tests/proxied/views.test.ts @@ -1,6 +1,5 @@ import AtpAgent, { AtUri } from '@atproto/api' -import { TestNetwork } from '@atproto/dev-env' -import { SeedClient } from '../seeds/client' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import basicSeed from '../seeds/basic' import { forSnapshot } from '../_util' @@ -19,7 +18,7 @@ describe('proxies view requests', () => { 
dbPostgresSchema: 'proxy_views', }) agent = network.pds.getClient() - sc = new SeedClient(agent) + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob @@ -31,6 +30,20 @@ describe('proxies view requests', () => { await network.processAll() }) + beforeAll(async () => { + await agent.api.app.bsky.feed.generator.create( + { repo: alice, rkey: 'all' }, + { + did: 'did:example:feedgen', + displayName: 'All', + description: 'Provides all feed candidates', + createdAt: new Date().toISOString(), + }, + sc.getHeaders(alice), + ) + await network.processAll() + }) + afterAll(async () => { await network.close() }) @@ -41,7 +54,7 @@ describe('proxies view requests', () => { actor: bob, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -53,7 +66,7 @@ describe('proxies view requests', () => { actors: [alice, bob], }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -75,7 +88,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.actor.getSuggestions( {}, { - headers: { ...sc.getHeaders(carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(carol) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -84,7 +97,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(carol) }, }, ) const pt2 = await agent.api.app.bsky.actor.getSuggestions( @@ -92,7 +105,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(carol), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(carol) }, }, ) expect([...pt1.data.actors, ...pt2.data.actors]).toEqual(res.data.actors) @@ -104,7 +117,7 @@ describe('proxies view requests', () => { 
term: '.test', }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) // sort because pagination is done off of did @@ -118,7 +131,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.actor.searchActors( @@ -127,7 +140,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const sortedPaginated = [...pt1.data.actors, ...pt2.data.actors].sort( @@ -142,7 +155,7 @@ describe('proxies view requests', () => { term: '.test', }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const sorted = res.data.actors.sort((a, b) => @@ -157,7 +170,7 @@ describe('proxies view requests', () => { actor: bob, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -167,7 +180,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.feed.getAuthorFeed( @@ -176,7 +189,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.feed, ...pt2.data.feed]).toEqual(res.data.feed) @@ -221,7 +234,7 @@ describe('proxies view requests', () => { uri: postUri, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -231,7 +244,7 @@ describe('proxies view requests', () => { 
limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.feed.getLikes( @@ -240,7 +253,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.likes, ...pt2.data.likes]).toEqual(res.data.likes) @@ -253,7 +266,7 @@ describe('proxies view requests', () => { uri: postUri, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -263,7 +276,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.feed.getRepostedBy( @@ -272,7 +285,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.repostedBy, ...pt2.data.repostedBy]).toEqual( @@ -287,18 +300,17 @@ describe('proxies view requests', () => { uris, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() }) - // @TODO re-enable when proxying is a full-proxy - it.skip('feed.getTimeline', async () => { + it('feed.getTimeline', async () => { const res = await agent.api.app.bsky.feed.getTimeline( {}, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) @@ -308,7 +320,7 @@ describe('proxies view requests', () => { limit: 2, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.feed.getTimeline( 
@@ -316,12 +328,22 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.feed, ...pt2.data.feed]).toEqual(res.data.feed) }) + it('unspecced.getPopularFeedGenerators', async () => { + const res = await agent.api.app.bsky.unspecced.getPopularFeedGenerators( + {}, + { + headers: { ...sc.getHeaders(alice) }, + }, + ) + expect(forSnapshot(res.data)).toMatchSnapshot() + }) + let feedUri: string it('feed.getFeedGenerator', async () => { feedUri = AtUri.make( @@ -350,7 +372,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.feed.getFeedGenerator( { feed: feedUri }, { - headers: { ...sc.getHeaders(sc.dids.alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -360,7 +382,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.feed.getFeedGenerators( { feeds: [feedUri.toString()] }, { - headers: { ...sc.getHeaders(sc.dids.alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(sc.dids.alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -373,7 +395,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.graph.getBlocks( {}, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -382,7 +404,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.graph.getBlocks( @@ -390,7 +412,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) 
expect([...pt1.data.blocks, ...pt2.data.blocks]).toEqual(res.data.blocks) @@ -403,7 +425,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.graph.getFollows( { actor: bob }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -413,7 +435,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.graph.getFollows( @@ -422,7 +444,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.follows, ...pt2.data.follows]).toEqual(res.data.follows) @@ -432,7 +454,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.graph.getFollowers( { actor: bob }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -442,7 +464,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.graph.getFollowers( @@ -451,7 +473,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.followers, ...pt2.data.followers]).toEqual( @@ -506,7 +528,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.graph.getList( { list: listUri }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() 
@@ -516,7 +538,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.graph.getList( @@ -525,7 +547,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.items, ...pt2.data.items]).toEqual(res.data.items) @@ -535,7 +557,7 @@ describe('proxies view requests', () => { const res = await agent.api.app.bsky.graph.getLists( { actor: bob }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect(forSnapshot(res.data)).toMatchSnapshot() @@ -545,7 +567,7 @@ describe('proxies view requests', () => { limit: 1, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) const pt2 = await agent.api.app.bsky.graph.getLists( @@ -554,7 +576,7 @@ describe('proxies view requests', () => { cursor: pt1.data.cursor, }, { - headers: { ...sc.getHeaders(alice), 'x-appview-proxy': 'true' }, + headers: { ...sc.getHeaders(alice) }, }, ) expect([...pt1.data.lists, ...pt2.data.lists]).toEqual(res.data.lists) diff --git a/packages/pds/tests/races.test.ts b/packages/pds/tests/races.test.ts index 7f276e61147..220e9c252c8 100644 --- a/packages/pds/tests/races.test.ts +++ b/packages/pds/tests/races.test.ts @@ -1,25 +1,24 @@ import AtpAgent from '@atproto/api' -import { CloseFn, runTestServer } from './_util' +import { wait } from '@atproto/common' +import { TestNetworkNoAppView } from '@atproto/dev-env' +import { CommitData, readCarWithRoot, verifyRepo } from '@atproto/repo' import AppContext from '../src/context' import { PreparedWrite, prepareCreate } from '../src/repo' -import { wait } from '@atproto/common' import SqlRepoStorage from '../src/sql-repo-storage' -import { 
CommitData, readCarWithRoot, verifyRepo } from '@atproto/repo' import { ConcurrentWriteError } from '../src/services/repo' describe('crud operations', () => { + let network: TestNetworkNoAppView let ctx: AppContext let agent: AtpAgent let did: string - let close: CloseFn beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'races', }) - ctx = server.ctx - close = server.close - agent = new AtpAgent({ service: server.url }) + ctx = network.pds.ctx + agent = network.pds.getClient() await agent.createAccount({ email: 'alice@test.com', handle: 'alice.test', @@ -29,7 +28,7 @@ describe('crud operations', () => { }) afterAll(async () => { - await close() + await network.close() }) const formatWrite = async () => { diff --git a/packages/pds/tests/rate-limits.test.ts b/packages/pds/tests/rate-limits.test.ts index 6f7cd77cbb8..3e8703126f3 100644 --- a/packages/pds/tests/rate-limits.test.ts +++ b/packages/pds/tests/rate-limits.test.ts @@ -1,32 +1,33 @@ -import { runTestServer, TestServerInfo } from './_util' -import { SeedClient } from './seeds/client' -import userSeed from './seeds/basic' import { AtpAgent } from '@atproto/api' import { randomStr } from '@atproto/crypto' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' +import userSeed from './seeds/basic' describe('rate limits', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient let alice: string let bob: string beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'rate_limits', - redisScratchAddress: process.env.REDIS_HOST, - redisScratchPassword: process.env.REDIS_PASSWORD, - rateLimitsEnabled: true, + pds: { + redisScratchAddress: process.env.REDIS_HOST, + redisScratchPassword: process.env.REDIS_PASSWORD, + rateLimitsEnabled: true, + }, }) - agent = new AtpAgent({ service: server.url }) - sc = new 
SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await userSeed(sc) alice = sc.dids.alice bob = sc.dids.bob }) afterAll(async () => { - await server.close() + await network.close() }) it('rate limits by ip', async () => { diff --git a/packages/pds/tests/image/fixtures/at.png b/packages/pds/tests/sample-img/at.png similarity index 100% rename from packages/pds/tests/image/fixtures/at.png rename to packages/pds/tests/sample-img/at.png diff --git a/packages/pds/tests/image/fixtures/hd-key.jpg b/packages/pds/tests/sample-img/hd-key.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/hd-key.jpg rename to packages/pds/tests/sample-img/hd-key.jpg diff --git a/packages/pds/tests/image/fixtures/key-alt.jpg b/packages/pds/tests/sample-img/key-alt.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/key-alt.jpg rename to packages/pds/tests/sample-img/key-alt.jpg diff --git a/packages/pds/tests/image/fixtures/key-landscape-large.jpg b/packages/pds/tests/sample-img/key-landscape-large.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/key-landscape-large.jpg rename to packages/pds/tests/sample-img/key-landscape-large.jpg diff --git a/packages/pds/tests/image/fixtures/key-landscape-small.jpg b/packages/pds/tests/sample-img/key-landscape-small.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/key-landscape-small.jpg rename to packages/pds/tests/sample-img/key-landscape-small.jpg diff --git a/packages/pds/tests/image/fixtures/key-portrait-large.jpg b/packages/pds/tests/sample-img/key-portrait-large.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/key-portrait-large.jpg rename to packages/pds/tests/sample-img/key-portrait-large.jpg diff --git a/packages/pds/tests/image/fixtures/key-portrait-small.jpg b/packages/pds/tests/sample-img/key-portrait-small.jpg similarity index 100% rename from packages/pds/tests/image/fixtures/key-portrait-small.jpg 
rename to packages/pds/tests/sample-img/key-portrait-small.jpg diff --git a/packages/pds/tests/seeds/basic.ts b/packages/pds/tests/seeds/basic.ts index ec8a5f05c00..3d045fc9239 100644 --- a/packages/pds/tests/seeds/basic.ts +++ b/packages/pds/tests/seeds/basic.ts @@ -1,7 +1,6 @@ +import { SeedClient } from '@atproto/dev-env' import { ids } from '../../src/lexicon/lexicons' import { FLAG } from '../../src/lexicon/types/com/atproto/admin/defs' -import { adminAuth } from '../_util' -import { SeedClient } from './client' import usersSeed from './users' export default async (sc: SeedClient, invite?: { code: string }) => { @@ -36,12 +35,12 @@ export default async (sc: SeedClient, invite?: { code: string }) => { }) const img1 = await sc.uploadFile( carol, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) const img2 = await sc.uploadFile( carol, - 'tests/image/fixtures/key-alt.jpg', + 'tests/sample-img/key-alt.jpg', 'image/jpeg', ) await sc.post( @@ -102,7 +101,7 @@ export default async (sc: SeedClient, invite?: { code: string }) => { const replyImg = await sc.uploadFile( bob, - 'tests/image/fixtures/key-landscape-small.jpg', + 'tests/sample-img/key-landscape-small.jpg', 'image/jpeg', ) await sc.reply( @@ -142,7 +141,7 @@ export default async (sc: SeedClient, invite?: { code: string }) => { }, { encoding: 'application/json', - headers: { authorization: adminAuth() }, + headers: sc.adminAuthHeaders(), }, ) diff --git a/packages/pds/tests/seeds/follows.ts b/packages/pds/tests/seeds/follows.ts index f15156dbff5..1abe555ff00 100644 --- a/packages/pds/tests/seeds/follows.ts +++ b/packages/pds/tests/seeds/follows.ts @@ -1,4 +1,4 @@ -import { SeedClient } from './client' +import { SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient) => { await sc.createAccount('alice', users.alice) diff --git a/packages/pds/tests/seeds/likes.ts b/packages/pds/tests/seeds/likes.ts index 27eeba09c40..c1671652a5e 
100644 --- a/packages/pds/tests/seeds/likes.ts +++ b/packages/pds/tests/seeds/likes.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import basicSeed from './basic' -import { SeedClient } from './client' export default async (sc: SeedClient) => { await basicSeed(sc) diff --git a/packages/pds/tests/seeds/reposts.ts b/packages/pds/tests/seeds/reposts.ts index 8de9b8ec655..9bb444ec8f2 100644 --- a/packages/pds/tests/seeds/reposts.ts +++ b/packages/pds/tests/seeds/reposts.ts @@ -1,5 +1,5 @@ +import { SeedClient } from '@atproto/dev-env' import basicSeed from './basic' -import { SeedClient } from './client' export default async (sc: SeedClient) => { await basicSeed(sc) diff --git a/packages/pds/tests/seeds/thread.ts b/packages/pds/tests/seeds/thread.ts index 921736e919e..747e2fdfd6e 100644 --- a/packages/pds/tests/seeds/thread.ts +++ b/packages/pds/tests/seeds/thread.ts @@ -1,4 +1,4 @@ -import { RecordRef, SeedClient } from './client' +import { RecordRef, SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient, did, threads: Item[]) => { const refByItemId: Record = {} diff --git a/packages/pds/tests/seeds/users-bulk.ts b/packages/pds/tests/seeds/users-bulk.ts index 5cc6813753c..ec4e4b5a6f7 100644 --- a/packages/pds/tests/seeds/users-bulk.ts +++ b/packages/pds/tests/seeds/users-bulk.ts @@ -1,5 +1,5 @@ import { chunkArray } from '@atproto/common' -import { SeedClient } from './client' +import { SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient, max = Infinity) => { // @TODO when these are run in parallel, seem to get an intermittent diff --git a/packages/pds/tests/seeds/users.ts b/packages/pds/tests/seeds/users.ts index 2ef9a74864f..6f20bf613bb 100644 --- a/packages/pds/tests/seeds/users.ts +++ b/packages/pds/tests/seeds/users.ts @@ -1,4 +1,4 @@ -import { SeedClient } from './client' +import { SeedClient } from '@atproto/dev-env' export default async (sc: SeedClient, invite?: { code: string }) => { await 
sc.createAccount('alice', { ...users.alice, inviteCode: invite?.code }) diff --git a/packages/pds/tests/sequencer.test.ts b/packages/pds/tests/sequencer.test.ts index c41cedafb58..d48ba1797d6 100644 --- a/packages/pds/tests/sequencer.test.ts +++ b/packages/pds/tests/sequencer.test.ts @@ -1,15 +1,13 @@ -import AtpAgent from '@atproto/api' +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import { randomStr } from '@atproto/crypto' import { cborEncode, readFromGenerator, wait } from '@atproto/common' import { Sequencer, SeqEvt } from '../src/sequencer' import Outbox from '../src/sequencer/outbox' import { Database } from '../src' -import { SeedClient } from './seeds/client' import userSeed from './seeds/users' -import { TestServerInfo, runTestServer } from './_util' describe('sequencer', () => { - let server: TestServerInfo + let network: TestNetworkNoAppView let db: Database let sequencer: Sequencer let sc: SeedClient @@ -20,13 +18,12 @@ describe('sequencer', () => { let lastSeen: number beforeAll(async () => { - server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'sequencer', }) - db = server.ctx.db - sequencer = server.ctx.sequencer - const agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + db = network.pds.ctx.db + sequencer = network.pds.ctx.sequencer + sc = network.getSeedClient() await userSeed(sc) alice = sc.dids.alice bob = sc.dids.bob @@ -35,7 +32,7 @@ describe('sequencer', () => { }) afterAll(async () => { - await server.close() + await network.close() }) const randomPost = async (by: string) => sc.post(by, randomStr(8, 'base32')) @@ -81,7 +78,7 @@ describe('sequencer', () => { const caughtUp = (outbox: Outbox): (() => Promise) => { return async () => { - const leaderCaughtUp = await server.ctx.sequencerLeader?.isCaughtUp() + const leaderCaughtUp = await network.pds.ctx.sequencerLeader?.isCaughtUp() if (!leaderCaughtUp) return false const lastEvt = await 
outbox.sequencer.curr() if (!lastEvt) return true diff --git a/packages/pds/tests/server.test.ts b/packages/pds/tests/server.test.ts index 3994df3a9ab..23298a7d731 100644 --- a/packages/pds/tests/server.test.ts +++ b/packages/pds/tests/server.test.ts @@ -1,40 +1,40 @@ import { AddressInfo } from 'net' import express from 'express' import axios, { AxiosError } from 'axios' +import { TestNetwork, SeedClient } from '@atproto/dev-env' import AtpAgent, { AtUri } from '@atproto/api' -import { CloseFn, runTestServer, TestServerInfo } from './_util' import { handler as errorHandler } from '../src/error' -import { SeedClient } from './seeds/client' import basicSeed from './seeds/basic' import { Database } from '../src' import { randomStr } from '@atproto/crypto' describe('server', () => { - let server: TestServerInfo - let close: CloseFn + let network: TestNetwork let db: Database let agent: AtpAgent let sc: SeedClient let alice: string beforeAll(async () => { - server = await runTestServer({ + network = await TestNetwork.create({ dbPostgresSchema: 'server', + pds: { + version: '0.0.0', + }, }) - close = server.close - db = server.ctx.db - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + db = network.pds.ctx.db + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice }) afterAll(async () => { - await close() + await network.close() }) it('preserves 404s.', async () => { - const promise = axios.get(`${server.url}/unknown`) + const promise = axios.get(`${network.pds.url}/unknown`) await expect(promise).rejects.toThrow('failed with status code 404') }) @@ -65,7 +65,7 @@ describe('server', () => { let error: AxiosError try { await axios.post( - `${server.url}/xrpc/com.atproto.repo.createRecord`, + `${network.pds.url}/xrpc/com.atproto.repo.createRecord`, { data: 'x'.repeat(100 * 1024), // 100kb }, @@ -106,25 +106,26 @@ describe('server', () => { const uri = new AtUri(createRes.data.uri) const res = 
await axios.get( - `${server.url}/xrpc/com.atproto.repo.getRecord?repo=${uri.host}&collection=${uri.collection}&rkey=${uri.rkey}`, + `${network.pds.url}/xrpc/com.atproto.repo.getRecord?repo=${uri.host}&collection=${uri.collection}&rkey=${uri.rkey}`, { decompress: false, headers: { ...sc.getHeaders(alice), 'accept-encoding': 'gzip' }, }, ) + expect(res.headers['content-encoding']).toEqual('gzip') }) it('compresses large car file responses', async () => { const res = await axios.get( - `${server.url}/xrpc/com.atproto.sync.getRepo?did=${alice}`, + `${network.pds.url}/xrpc/com.atproto.sync.getRepo?did=${alice}`, { decompress: false, headers: { 'accept-encoding': 'gzip' } }, ) expect(res.headers['content-encoding']).toEqual('gzip') }) it('does not compress small payloads', async () => { - const res = await axios.get(`${server.url}/xrpc/_health`, { + const res = await axios.get(`${network.pds.url}/xrpc/_health`, { decompress: false, headers: { 'accept-encoding': 'gzip' }, }) @@ -132,19 +133,19 @@ describe('server', () => { }) it('healthcheck succeeds when database is available.', async () => { - const { data, status } = await axios.get(`${server.url}/xrpc/_health`) + const { data, status } = await axios.get(`${network.pds.url}/xrpc/_health`) expect(status).toEqual(200) expect(data).toEqual({ version: '0.0.0' }) }) it('healthcheck fails when database is unavailable.', async () => { // destroy to release lock & allow db to close - await server.ctx.sequencerLeader?.destroy() + await network.pds.ctx.sequencerLeader?.destroy() await db.close() let error: AxiosError try { - await axios.get(`${server.url}/xrpc/_health`) + await axios.get(`${network.pds.url}/xrpc/_health`) throw new Error('Healthcheck should have failed') } catch (err) { if (axios.isAxiosError(err)) { diff --git a/packages/pds/tests/sql-repo-storage.test.ts b/packages/pds/tests/sql-repo-storage.test.ts index c19a8b41805..ea63cf07e06 100644 --- a/packages/pds/tests/sql-repo-storage.test.ts +++ 
b/packages/pds/tests/sql-repo-storage.test.ts @@ -1,25 +1,24 @@ +import { TestNetworkNoAppView } from '@atproto/dev-env' import { range, dataToCborBlock, TID } from '@atproto/common' import { CidSet, def } from '@atproto/repo' import BlockMap from '@atproto/repo/src/block-map' +import { CID } from 'multiformats/cid' import { Database } from '../src' import SqlRepoStorage from '../src/sql-repo-storage' -import { CloseFn, runTestServer } from './_util' -import { CID } from 'multiformats/cid' describe('sql repo storage', () => { + let network: TestNetworkNoAppView let db: Database - let close: CloseFn beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'sql_repo_storage', }) - close = server.close - db = server.ctx.db + db = network.pds.ctx.db }) afterAll(async () => { - await close() + await network.close() }) it('puts and gets blocks.', async () => { @@ -28,7 +27,7 @@ describe('sql repo storage', () => { const cid = await db.transaction(async (dbTxn) => { const storage = new SqlRepoStorage(dbTxn, did) const block = await dataToCborBlock({ my: 'block' }) - await storage.putBlock(block.cid, block.bytes) + await storage.putBlock(block.cid, block.bytes, TID.nextStr()) return block.cid }) @@ -44,14 +43,14 @@ describe('sql repo storage', () => { const cidA = await db.transaction(async (dbTxn) => { const storage = new SqlRepoStorage(dbTxn, did) const block = await dataToCborBlock({ my: 'block' }) - await storage.putBlock(block.cid, block.bytes) + await storage.putBlock(block.cid, block.bytes, TID.nextStr()) return block.cid }) const cidB = await db.transaction(async (dbTxn) => { const storage = new SqlRepoStorage(dbTxn, did) const block = await dataToCborBlock({ my: 'block' }) - await storage.putBlock(block.cid, block.bytes) + await storage.putBlock(block.cid, block.bytes, TID.nextStr()) return block.cid }) @@ -87,13 +86,15 @@ describe('sql repo storage', () => { cid: commits[0].cid, rev: 
TID.nextStr(), prev: null, + since: null, newBlocks: blocks0, removedCids: new CidSet(), }) await storage.applyCommit({ cid: commits[1].cid, - prev: commits[0].cid, rev: TID.nextStr(), + prev: commits[0].cid, + since: null, newBlocks: blocks1, removedCids: toRemove, }) diff --git a/packages/pds/tests/sync/list.test.ts b/packages/pds/tests/sync/list.test.ts index cbab7126ad3..cd6716e7a11 100644 --- a/packages/pds/tests/sync/list.test.ts +++ b/packages/pds/tests/sync/list.test.ts @@ -1,25 +1,23 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' -import { CloseFn, runTestServer } from '../_util' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' describe('sync listing', () => { + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient - let close: CloseFn beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'sync_list', }) - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) }) afterAll(async () => { - await close() + await network.close() }) it('lists hosted repos in order of creation', async () => { diff --git a/packages/pds/tests/sync/subscribe-repos.test.ts b/packages/pds/tests/sync/subscribe-repos.test.ts index 9c8c98459ce..58745b7fe1e 100644 --- a/packages/pds/tests/sync/subscribe-repos.test.ts +++ b/packages/pds/tests/sync/subscribe-repos.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { cborDecode, @@ -17,14 +18,13 @@ import { Tombstone as TombstoneEvt, } from '../../src/lexicon/types/com/atproto/sync/subscribeRepos' import { AppContext, Database } from '../../src' -import { SeedClient } from '../seeds/client' import basicSeed from '../seeds/basic' -import { 
CloseFn, runTestServer } from '../_util' import { CID } from 'multiformats/cid' describe('repo subscribe repos', () => { let serverHost: string + let network: TestNetworkNoAppView let db: Database let ctx: AppContext @@ -35,18 +35,18 @@ describe('repo subscribe repos', () => { let carol: string let dan: string - let close: CloseFn - beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'repo_subscribe_repos', + pds: { + repoBackfillLimitMs: HOUR, + }, }) - serverHost = server.url.replace('http://', '') - ctx = server.ctx - db = server.ctx.db - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + serverHost = network.pds.url.replace('http://', '') + ctx = network.pds.ctx + db = network.pds.ctx.db + agent = network.pds.getClient() + sc = network.getSeedClient() await basicSeed(sc) alice = sc.dids.alice bob = sc.dids.bob @@ -55,7 +55,7 @@ describe('repo subscribe repos', () => { }) afterAll(async () => { - await close() + await network.close() }) const getRepo = async (did: string): Promise => { diff --git a/packages/pds/tests/sync/sync.test.ts b/packages/pds/tests/sync/sync.test.ts index 27311cbf81d..424ebc86337 100644 --- a/packages/pds/tests/sync/sync.test.ts +++ b/packages/pds/tests/sync/sync.test.ts @@ -1,3 +1,4 @@ +import { TestNetworkNoAppView, SeedClient } from '@atproto/dev-env' import AtpAgent from '@atproto/api' import { TID } from '@atproto/common' import { randomStr } from '@atproto/crypto' @@ -7,10 +8,9 @@ import { AtUri } from '@atproto/syntax' import { TAKEDOWN } from '@atproto/api/src/client/types/com/atproto/admin/defs' import { CID } from 'multiformats/cid' import { AppContext } from '../../src' -import { adminAuth, CloseFn, runTestServer } from '../_util' -import { SeedClient } from '../seeds/client' describe('repo sync', () => { + let network: TestNetworkNoAppView let agent: AtpAgent let sc: SeedClient let did: string @@ -21,16 
+21,13 @@ describe('repo sync', () => { let currRoot: CID | undefined let ctx: AppContext - let close: CloseFn - beforeAll(async () => { - const server = await runTestServer({ + network = await TestNetworkNoAppView.create({ dbPostgresSchema: 'repo_sync', }) - ctx = server.ctx - close = server.close - agent = new AtpAgent({ service: server.url }) - sc = new SeedClient(agent) + ctx = network.pds.ctx + agent = network.pds.getClient() + sc = network.getSeedClient() await sc.createAccount('alice', { email: 'alice@test.com', handle: 'alice.test', @@ -41,7 +38,7 @@ describe('repo sync', () => { }) afterAll(async () => { - await close() + await network.close() }) it('creates and syncs some records', async () => { @@ -229,7 +226,7 @@ describe('repo sync', () => { await expect(tryGetRepoOwner).resolves.toBeDefined() const tryGetRepoAdmin = agent.api.com.atproto.sync.getRepo( { did }, - { headers: { authorization: adminAuth() } }, + { headers: network.pds.adminAuthHeaders() }, ) await expect(tryGetRepoAdmin).resolves.toBeDefined() }) diff --git a/packages/pds/tsconfig.json b/packages/pds/tsconfig.json index 9505b44db69..daf8ee1a04a 100644 --- a/packages/pds/tsconfig.json +++ b/packages/pds/tsconfig.json @@ -9,6 +9,7 @@ "include": ["./src", "__tests__/**/**.ts"], "references": [ { "path": "../api/tsconfig.build.json" }, + { "path": "../aws/tsconfig.build.json" }, { "path": "../common/tsconfig.build.json" }, { "path": "../crypto/tsconfig.build.json" }, { "path": "../identity/tsconfig.build.json" }, diff --git a/packages/repo/src/util.ts b/packages/repo/src/util.ts index 563a848d4ae..89f85a097d5 100644 --- a/packages/repo/src/util.ts +++ b/packages/repo/src/util.ts @@ -95,7 +95,7 @@ export const readCar = async ( const roots = await car.getRoots() const blocks = new BlockMap() for await (const block of verifyIncomingCarBlocks(car.blocks())) { - await blocks.set(block.cid, block.bytes) + blocks.set(block.cid, block.bytes) } return { roots, diff --git a/pnpm-lock.yaml 
b/pnpm-lock.yaml index fd9ef8cdbad..0e22af0c4e1 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -111,12 +111,12 @@ importers: specifier: ^2.1.0 version: 2.1.0 devDependencies: + '@atproto/dev-env': + specifier: workspace:^ + version: link:../dev-env '@atproto/lex-cli': specifier: workspace:^ version: link:../lex-cli - '@atproto/pds': - specifier: workspace:^ - version: link:../pds common-tags: specifier: ^1.8.2 version: 1.8.2 @@ -192,9 +192,6 @@ importers: cors: specifier: ^2.8.5 version: 2.8.5 - dotenv: - specifier: ^16.0.0 - version: 16.0.3 express: specifier: ^4.17.2 version: 4.18.2 @@ -346,6 +343,9 @@ importers: '@atproto/identity': specifier: workspace:^ version: link:../identity + '@atproto/lexicon': + specifier: workspace:^ + version: link:../lexicon '@atproto/pds': specifier: workspace:^ version: link:../pds @@ -368,7 +368,7 @@ importers: specifier: ^5.0.1 version: 5.1.1 dotenv: - specifier: ^16.0.1 + specifier: ^16.0.3 version: 16.0.3 express: specifier: ^4.18.2 @@ -376,6 +376,9 @@ importers: get-port: specifier: ^6.1.2 version: 6.1.2 + multiformats: + specifier: ^9.9.0 + version: 9.9.0 sharp: specifier: ^0.31.2 version: 0.31.2 @@ -465,6 +468,9 @@ importers: '@atproto/api': specifier: workspace:^ version: link:../api + '@atproto/aws': + specifier: workspace:^ + version: link:../aws '@atproto/common': specifier: workspace:^ version: link:../common @@ -504,9 +510,9 @@ importers: cors: specifier: ^2.8.5 version: 2.8.5 - dotenv: - specifier: ^16.0.0 - version: 16.0.3 + disposable-email: + specifier: ^0.2.3 + version: 0.2.3 express: specifier: ^4.17.2 version: 4.18.2 @@ -586,6 +592,9 @@ importers: '@types/cors': specifier: ^2.8.12 version: 2.8.12 + '@types/disposable-email': + specifier: ^0.2.0 + version: 0.2.0 '@types/express': specifier: ^4.17.13 version: 4.17.13 @@ -610,6 +619,9 @@ importers: axios: specifier: ^0.27.2 version: 0.27.2 + ws: + specifier: ^8.12.0 + version: 8.12.0 packages/repo: dependencies: @@ -5295,6 +5307,10 @@ packages: resolution: 
{integrity: sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==} dev: true + /@types/disposable-email@0.2.0: + resolution: {integrity: sha512-i4fPC8+a5j7RlKVe9cOHz6cYVxkSFYuJ78GB3EJPMfBJURWmEsD4gb/gD48j7KAWe0M8ZvdJR6a6GaDohTYttw==} + dev: true + /@types/elliptic@6.4.14: resolution: {integrity: sha512-z4OBcDAU0GVwDTuwJzQCiL6188QvZMkvoERgcVjq0/mPM8jCfdwZ3x5zQEVoL9WCAru3aG5wl3Z5Ww5wBWn7ZQ==} dependencies: @@ -6647,6 +6663,10 @@ packages: path-type: 4.0.0 dev: true + /disposable-email@0.2.3: + resolution: {integrity: sha512-gkBQQ5Res431ZXqLlAafrXHizG7/1FWmi8U2RTtriD78Vc10HhBUvdJun3R4eSF0KRIQQJs+wHlxjkED/Hr1EQ==} + dev: false + /doctrine@3.0.0: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} @@ -11189,7 +11209,6 @@ packages: optional: true utf-8-validate: optional: true - dev: false /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} diff --git a/services/pds/Dockerfile b/services/pds/Dockerfile index c6d7d4351d0..c108df56ddd 100644 --- a/services/pds/Dockerfile +++ b/services/pds/Dockerfile @@ -44,9 +44,11 @@ ENTRYPOINT ["dumb-init", "--"] WORKDIR /app/services/pds COPY --from=build /app /app +RUN mkdir /app/data && chown node /app/data +VOLUME /app/data EXPOSE 3000 -ENV PORT=3000 +ENV PDS_PORT=3000 ENV NODE_ENV=production # https://github.com/nodejs/docker-node/blob/master/docs/BestPractices.md#non-root-user diff --git a/services/pds/index.js b/services/pds/index.js index 4be4833e942..112d63edf90 100644 --- a/services/pds/index.js +++ b/services/pds/index.js @@ -12,135 +12,49 @@ require('dd-trace') // Only works with commonjs // Tracer code above must come before anything else const path = require('path') -const { KmsKeypair, S3BlobStore } = require('@atproto/aws') const { - Database, - ServerConfig, PDS, - ViewMaintainer, + envToCfg, + 
envToSecrets, + readEnv, + httpLogger, PeriodicModerationActionReversal, } = require('@atproto/pds') -const { Secp256k1Keypair } = require('@atproto/crypto') +const pkg = require('@atproto/pds/package.json') const main = async () => { - const env = getEnv() - // Migrate using credentialed user - const migrateDb = Database.postgres({ - url: pgUrl(env.dbMigrateCreds), - schema: env.dbSchema, - // We need one connection for the - // view-maintainer lock then one for anything else. - poolSize: 2, - }) - // Use lower-credentialed user to run the app - const db = Database.postgres({ - url: pgUrl(env.dbCreds), - schema: env.dbSchema, - poolSize: env.dbPoolSize, - poolMaxUses: env.dbPoolMaxUses, - poolIdleTimeoutMs: env.dbPoolIdleTimeoutMs, - }) - const s3Blobstore = new S3BlobStore({ bucket: env.s3Bucket }) - const repoSigningKey = await Secp256k1Keypair.import(env.repoSigningKey) - const plcRotationKey = await KmsKeypair.load({ - keyId: env.plcRotationKeyId, - }) - let recoveryKey - if (env.recoveryKeyId.startsWith('did:')) { - recoveryKey = env.recoveryKeyId - } else { - const recoveryKeypair = await KmsKeypair.load({ - keyId: env.recoveryKeyId, - }) - recoveryKey = recoveryKeypair.did() - } - const cfg = ServerConfig.readEnv({ - port: env.port, - recoveryKey, - emailSmtpUrl: smtpUrl({ - host: env.smtpHost, - username: env.smtpUsername, - password: env.smtpPassword, - }), - }) - const pds = PDS.create({ - db, - blobstore: s3Blobstore, - repoSigningKey, - plcRotationKey, - config: cfg, - }) - const viewMaintainer = new ViewMaintainer(migrateDb) - const viewMaintainerRunning = viewMaintainer.run() + const env = readEnv() + env.version ??= pkg.version + const cfg = envToCfg(env) + const secrets = envToSecrets(env) + const pds = await PDS.create(cfg, secrets) // If the PDS is configured to proxy moderation, this will be running on appview instead of pds. // Also don't run this on the sequencer leader, which may not be configured regarding moderation proxying at all. 
const periodicModerationActionReversal = - pds.ctx.shouldProxyModeration() || pds.ctx.cfg.sequencerLeaderEnabled + pds.ctx.cfg.bskyAppView.proxyModeration || + pds.ctx.cfg.sequencerLeaderEnabled ? null : new PeriodicModerationActionReversal(pds.ctx) const periodicModerationActionReversalRunning = periodicModerationActionReversal?.run() await pds.start() + + httpLogger.info('pds is running') // Graceful shutdown (see also https://aws.amazon.com/blogs/containers/graceful-shutdowns-with-ecs/) process.on('SIGTERM', async () => { - // Gracefully shutdown periodic-moderation-action-reversal before destroying pds instance + httpLogger.info('pds is stopping') + periodicModerationActionReversal?.destroy() await periodicModerationActionReversalRunning await pds.destroy() - // Gracefully shutdown view-maintainer - viewMaintainer.destroy() - await viewMaintainerRunning - - // Gracefully shutdown db - await migrateDb.close() + httpLogger.info('pds is stopped') }) } -const pgUrl = ({ - username = 'postgres', - password = 'postgres', - host = 'localhost', - port = '5432', - database = 'postgres', - sslmode, -}) => { - const enc = encodeURIComponent - return `postgresql://${username}:${enc( - password, - )}@${host}:${port}/${database}${sslmode ? `?sslmode=${enc(sslmode)}` : ''}` -} - -const smtpUrl = ({ username, password, host }) => { - const enc = encodeURIComponent - return `smtps://${username}:${enc(password)}@${host}` -} - -const maybeParseInt = (str) => { - const parsed = parseInt(str) - return isNaN(parsed) ? 
undefined : parsed -} - -const getEnv = () => ({ - port: parseInt(process.env.PORT), - plcRotationKeyId: process.env.PLC_ROTATION_KEY_ID, - repoSigningKey: process.env.REPO_SIGNING_KEY, - recoveryKeyId: process.env.RECOVERY_KEY_ID, - dbCreds: JSON.parse(process.env.DB_CREDS_JSON), - dbMigrateCreds: JSON.parse(process.env.DB_MIGRATE_CREDS_JSON), - dbSchema: process.env.DB_SCHEMA || undefined, - dbPoolSize: maybeParseInt(process.env.DB_POOL_SIZE), - dbPoolMaxUses: maybeParseInt(process.env.DB_POOL_MAX_USES), - dbPoolIdleTimeoutMs: maybeParseInt(process.env.DB_POOL_IDLE_TIMEOUT_MS), - smtpHost: process.env.SMTP_HOST, - smtpUsername: process.env.SMTP_USERNAME, - smtpPassword: process.env.SMTP_PASSWORD, - s3Bucket: process.env.S3_BUCKET_NAME, -}) - const maintainXrpcResource = (span, req) => { // Show actual xrpc method as resource rather than the route pattern if (span && req.originalUrl?.startsWith('/xrpc/')) {