Commit (forked from advplyr/audiobookshelf)

Showing 3 changed files with 358 additions and 0 deletions.
126 changes: 126 additions & 0 deletions
server/migrations/v2.13.5-series-column-unique.js
const Logger = require('../Logger')

/**
 * @typedef MigrationContext
 * @property {import('sequelize').QueryInterface} queryInterface - a sequelize QueryInterface object.
 * @property {import('../Logger')} logger - a Logger object.
 *
 * @typedef MigrationOptions
 * @property {MigrationContext} context - an object containing the migration context.
 */

/**
 * This upward migration script cleans any duplicate series in the `Series` table and
 * adds a unique index on the `name` and `libraryId` columns.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function up({ context: { queryInterface, logger } }) {
  // Upwards migration script
  logger.info('UPGRADE BEGIN: 2.13.5-series-column-unique ')

  // Use the queryInterface to query the Series table and find duplicates of the `name` and `libraryId` pair
  const [duplicates] = await queryInterface.sequelize.query(`
    SELECT name, libraryId, MAX(updatedAt) AS latestUpdatedAt, COUNT(name) AS count
    FROM Series
    GROUP BY name, libraryId
    HAVING COUNT(name) > 1
  `)

  // Report how many duplicates were found
  logger.info(`[2.13.5 migration] Found ${duplicates.length} duplicate series`)

  // Iterate over each duplicate series
  for (const duplicate of duplicates) {
    // Report the series name that is being deduplicated
    logger.info(`[2.13.5 migration] Deduplicating series "${duplicate.name}" in library ${duplicate.libraryId}`)

    // Get the most recent series that matches the `name` and `libraryId`
    const [mostRecentSeries] = await queryInterface.sequelize.query(
      `
      SELECT id
      FROM Series
      WHERE name = :name AND libraryId = :libraryId
      ORDER BY updatedAt DESC
      LIMIT 1
      `,
      {
        replacements: {
          name: duplicate.name,
          libraryId: duplicate.libraryId
        },
        type: queryInterface.sequelize.QueryTypes.SELECT
      }
    )

    if (mostRecentSeries) {
      // Update all BookSeries records for this series to point to the most recent series
      const [seriesUpdated] = await queryInterface.sequelize.query(
        `
        UPDATE BookSeries
        SET seriesId = :mostRecentSeriesId
        WHERE seriesId IN (
          SELECT id
          FROM Series
          WHERE name = :name AND libraryId = :libraryId
          AND id != :mostRecentSeriesId
        )
        `,
        {
          replacements: {
            name: duplicate.name,
            libraryId: duplicate.libraryId,
            mostRecentSeriesId: mostRecentSeries.id
          }
        }
      )

      // Delete the older duplicate series rows
      const seriesDeleted = await queryInterface.sequelize.query(
        `
        DELETE FROM Series
        WHERE name = :name AND libraryId = :libraryId
        AND id != :mostRecentSeriesId
        `,
        {
          replacements: {
            name: duplicate.name,
            libraryId: duplicate.libraryId,
            mostRecentSeriesId: mostRecentSeries.id
          }
        }
      )
    }
  }

  logger.info(`[2.13.5 migration] Deduplication complete`)

  // Create a unique index based on the name and library ID for the `Series` table
  await queryInterface.addIndex('Series', ['name', 'libraryId'], {
    unique: true,
    name: 'unique_series_name_per_library'
  })
  logger.info('Added unique index on Series.name and Series.libraryId')

  logger.info('UPGRADE END: 2.13.5-series-column-unique ')
}

/**
 * This downward migration script removes the unique index on the `Series` table.
 *
 * @param {MigrationOptions} options - an object containing the migration context.
 * @returns {Promise<void>} - A promise that resolves when the migration is complete.
 */
async function down({ context: { queryInterface, logger } }) {
  // Downward migration script
  logger.info('DOWNGRADE BEGIN: 2.13.5-series-column-unique ')

  // Remove the unique index
  await queryInterface.removeIndex('Series', 'unique_series_name_per_library')
  logger.info('Removed unique index on Series.name and Series.libraryId')

  logger.info('DOWNGRADE END: 2.13.5-series-column-unique ')
}

module.exports = { up, down }
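The `{ context: { queryInterface, logger } }` parameter shape matches the convention of umzug-style migration runners, where the runner injects a shared context object into every script's up/down function. The sketch below illustrates how such a runner could execute this script; it is not the runner this commit ships, and the glob path, database location, and Logger require path are assumptions made for illustration.

// Hypothetical umzug-based runner (illustration only, not part of this commit).
// Assumed: migrations live in server/migrations/ and Logger exposes info/warn/error.
const { Umzug, SequelizeStorage } = require('umzug')
const { Sequelize } = require('sequelize')
const Logger = require('./server/Logger')

async function runMigrations() {
  const sequelize = new Sequelize({ dialect: 'sqlite', storage: './config/absdatabase.sqlite', logging: false })

  const umzug = new Umzug({
    // Each matched file must export up()/down(), like the script above
    migrations: { glob: 'server/migrations/*.js' },
    // Becomes the `context` destructured by up()/down()
    context: { queryInterface: sequelize.getQueryInterface(), logger: Logger },
    // Records applied migrations in a SequelizeMeta table
    storage: new SequelizeStorage({ sequelize }),
    logger: console
  })

  // Apply all pending migrations; umzug.down() would revert the most recent one
  await umzug.up()
}

Because umzug records completed migrations in its storage table, calling runMigrations() on every startup is safe in this model: already-applied scripts are skipped.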
226 changes: 226 additions & 0 deletions
test/server/migrations/v2.13.5-series-column-unique.test.js
const { expect } = require('chai')
const sinon = require('sinon')
const { up, down } = require('../../../server/migrations/v2.13.5-series-column-unique')
const { Sequelize } = require('sequelize')
const Logger = require('../../../server/Logger')

describe('migration_example', () => {
  let sequelize
  let queryInterface
  let loggerInfoStub
  let series1Id
  let series2Id
  let series3Id
  let series1Id_dup
  let series3Id_dup
  let series1Id_dup2
  let book1Id
  let book2Id
  let book3Id
  let book4Id
  let book5Id
  let book6Id
  let library1Id
  let library2Id
  let bookSeries1Id
  let bookSeries2Id
  let bookSeries3Id
  let bookSeries1Id_dup
  let bookSeries3Id_dup
  let bookSeries1Id_dup2

  beforeEach(() => {
    sequelize = new Sequelize({ dialect: 'sqlite', storage: ':memory:', logging: false })
    queryInterface = sequelize.getQueryInterface()
    loggerInfoStub = sinon.stub(Logger, 'info')
  })

  afterEach(() => {
    sinon.restore()
  })

  describe('up', () => {
    beforeEach(async () => {
      await queryInterface.createTable('Series', {
        id: { type: Sequelize.UUID, primaryKey: true },
        name: { type: Sequelize.STRING, allowNull: false },
        libraryId: { type: Sequelize.UUID, allowNull: false },
        createdAt: { type: Sequelize.DATE, allowNull: false },
        updatedAt: { type: Sequelize.DATE, allowNull: false }
      })
      await queryInterface.createTable('BookSeries', {
        id: { type: Sequelize.UUID, primaryKey: true },
        bookId: { type: Sequelize.UUID, allowNull: false },
        seriesId: { type: Sequelize.UUID, allowNull: false }
      })
      // Set UUIDs for the tests
      series1Id = 'fc086255-3fd2-4a95-8a28-840d9206501b'
      series2Id = '70f46ac2-ee48-4b3c-9822-933cc15c29bd'
      series3Id = '01cac008-142b-4e15-b0ff-cf7cc2c5b64e'
      series1Id_dup = 'ad0b3b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      series3Id_dup = '4b3b4b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      series1Id_dup2 = '0123456a-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      book1Id = '4a38b6e5-0ae4-4de4-b119-4e33891bd63f'
      book2Id = '8bc2e61d-47f6-42ef-a3f4-93cf2f1de82f'
      book3Id = 'ec9bbaaf-1e55-457f-b59c-bd2bd955a404'
      book4Id = '876f3b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      book5Id = '4e5b4b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      book6Id = 'abcda123-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      library1Id = '3a5a1c7c-a914-472e-88b0-b871ceae63e7'
      library2Id = 'fd6c324a-4f3a-4bb0-99d6-7a330e765e7e'
      bookSeries1Id = 'eca24687-2241-4ffa-a9b3-02a0ba03c763'
      bookSeries2Id = '56f56105-813b-4395-9689-fd04198e7d5d'
      bookSeries3Id = '404a1761-c710-4d86-9d78-68d9a9c0fb6b'
      bookSeries1Id_dup = '8bea3b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      bookSeries3Id_dup = '89656a3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
      bookSeries1Id_dup2 = '9bea3b3b-4b3b-4b3b-4b3b-4b3b4b3b4b3b'
    })
    afterEach(async () => {
      await queryInterface.dropTable('Series')
      await queryInterface.dropTable('BookSeries')
    })
    it('upgrade with no duplicate series', async () => {
      // Add some entries to the Series table using the UUIDs for the ids
      await queryInterface.bulkInsert('Series', [
        { id: series1Id, name: 'Series 1', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series2Id, name: 'Series 2', libraryId: library2Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series3Id, name: 'Series 3', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() }
      ])
      // Add some entries to the BookSeries table
      await queryInterface.bulkInsert('BookSeries', [
        { id: bookSeries1Id, bookId: book1Id, seriesId: series1Id },
        { id: bookSeries2Id, bookId: book2Id, seriesId: series2Id },
        { id: bookSeries3Id, bookId: book3Id, seriesId: series3Id }
      ])

      await up({ context: { queryInterface, logger: Logger } })

      expect(loggerInfoStub.callCount).to.equal(5)
      expect(loggerInfoStub.getCall(0).calledWith(sinon.match('UPGRADE BEGIN: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(1).calledWith(sinon.match('[2.13.5 migration] Found 0 duplicate series'))).to.be.true
      expect(loggerInfoStub.getCall(2).calledWith(sinon.match('[2.13.5 migration] Deduplication complete'))).to.be.true
      expect(loggerInfoStub.getCall(3).calledWith(sinon.match('Added unique index on Series.name and Series.libraryId'))).to.be.true
      expect(loggerInfoStub.getCall(4).calledWith(sinon.match('UPGRADE END: 2.13.5-series-column-unique '))).to.be.true
      // Validate rows in tables
      const series = await queryInterface.sequelize.query('SELECT "id", "name", "libraryId" FROM Series', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(series).to.have.length(3)
      expect(series).to.deep.include({ id: series1Id, name: 'Series 1', libraryId: library1Id })
      expect(series).to.deep.include({ id: series2Id, name: 'Series 2', libraryId: library2Id })
      expect(series).to.deep.include({ id: series3Id, name: 'Series 3', libraryId: library1Id })
      const bookSeries = await queryInterface.sequelize.query('SELECT "id", "bookId", "seriesId" FROM BookSeries', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(bookSeries).to.have.length(3)
      expect(bookSeries).to.deep.include({ id: bookSeries1Id, bookId: book1Id, seriesId: series1Id })
      expect(bookSeries).to.deep.include({ id: bookSeries2Id, bookId: book2Id, seriesId: series2Id })
      expect(bookSeries).to.deep.include({ id: bookSeries3Id, bookId: book3Id, seriesId: series3Id })
    })
    it('upgrade with duplicate series', async () => {
      // Add some entries to the Series table using the UUIDs for the ids
      await queryInterface.bulkInsert('Series', [
        { id: series1Id, name: 'Series 1', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series2Id, name: 'Series 2', libraryId: library2Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series3Id, name: 'Series 3', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series1Id_dup, name: 'Series 1', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series3Id_dup, name: 'Series 3', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series1Id_dup2, name: 'Series 1', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() }
      ])
      // Add some entries to the BookSeries table
      await queryInterface.bulkInsert('BookSeries', [
        { id: bookSeries1Id, bookId: book1Id, seriesId: series1Id },
        { id: bookSeries2Id, bookId: book2Id, seriesId: series2Id },
        { id: bookSeries3Id, bookId: book3Id, seriesId: series3Id },
        { id: bookSeries1Id_dup, bookId: book4Id, seriesId: series1Id_dup },
        { id: bookSeries3Id_dup, bookId: book5Id, seriesId: series3Id_dup },
        { id: bookSeries1Id_dup2, bookId: book6Id, seriesId: series1Id_dup2 }
      ])

      await up({ context: { queryInterface, logger: Logger } })

      expect(loggerInfoStub.callCount).to.equal(7)
      expect(loggerInfoStub.getCall(0).calledWith(sinon.match('UPGRADE BEGIN: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(1).calledWith(sinon.match('[2.13.5 migration] Found 2 duplicate series'))).to.be.true
      expect(loggerInfoStub.getCall(2).calledWith(sinon.match('[2.13.5 migration] Deduplicating series "Series 1" in library 3a5a1c7c-a914-472e-88b0-b871ceae63e7'))).to.be.true
      expect(loggerInfoStub.getCall(3).calledWith(sinon.match('[2.13.5 migration] Deduplicating series "Series 3" in library 3a5a1c7c-a914-472e-88b0-b871ceae63e7'))).to.be.true
      expect(loggerInfoStub.getCall(4).calledWith(sinon.match('[2.13.5 migration] Deduplication complete'))).to.be.true
      expect(loggerInfoStub.getCall(5).calledWith(sinon.match('Added unique index on Series.name and Series.libraryId'))).to.be.true
      expect(loggerInfoStub.getCall(6).calledWith(sinon.match('UPGRADE END: 2.13.5-series-column-unique '))).to.be.true
      // Validate rows
      const series = await queryInterface.sequelize.query('SELECT "id", "name", "libraryId" FROM Series', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(series).to.have.length(3)
      expect(series).to.deep.include({ id: series1Id, name: 'Series 1', libraryId: library1Id })
      expect(series).to.deep.include({ id: series2Id, name: 'Series 2', libraryId: library2Id })
      expect(series).to.deep.include({ id: series3Id, name: 'Series 3', libraryId: library1Id })
      const bookSeries = await queryInterface.sequelize.query('SELECT "id", "bookId", "seriesId" FROM BookSeries', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(bookSeries).to.have.length(6)
      expect(bookSeries).to.deep.include({ id: bookSeries1Id, bookId: book1Id, seriesId: series1Id })
      expect(bookSeries).to.deep.include({ id: bookSeries2Id, bookId: book2Id, seriesId: series2Id })
      expect(bookSeries).to.deep.include({ id: bookSeries3Id, bookId: book3Id, seriesId: series3Id })
      expect(bookSeries).to.deep.include({ id: bookSeries1Id_dup, bookId: book4Id, seriesId: series1Id })
      expect(bookSeries).to.deep.include({ id: bookSeries3Id_dup, bookId: book5Id, seriesId: series3Id })
      expect(bookSeries).to.deep.include({ id: bookSeries1Id_dup2, bookId: book6Id, seriesId: series1Id })
    })
    it('upgrade with same series name in different libraries', async () => {
      // Add some entries to the Series table using the UUIDs for the ids
      await queryInterface.bulkInsert('Series', [
        { id: series1Id, name: 'Series 1', libraryId: library1Id, createdAt: new Date(), updatedAt: new Date() },
        { id: series2Id, name: 'Series 1', libraryId: library2Id, createdAt: new Date(), updatedAt: new Date() }
      ])
      // Add some entries to the BookSeries table
      await queryInterface.bulkInsert('BookSeries', [
        { id: bookSeries1Id, bookId: book1Id, seriesId: series1Id },
        { id: bookSeries2Id, bookId: book2Id, seriesId: series2Id }
      ])

      await up({ context: { queryInterface, logger: Logger } })

      expect(loggerInfoStub.callCount).to.equal(5)
      expect(loggerInfoStub.getCall(0).calledWith(sinon.match('UPGRADE BEGIN: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(1).calledWith(sinon.match('[2.13.5 migration] Found 0 duplicate series'))).to.be.true
      expect(loggerInfoStub.getCall(2).calledWith(sinon.match('[2.13.5 migration] Deduplication complete'))).to.be.true
      expect(loggerInfoStub.getCall(3).calledWith(sinon.match('Added unique index on Series.name and Series.libraryId'))).to.be.true
      expect(loggerInfoStub.getCall(4).calledWith(sinon.match('UPGRADE END: 2.13.5-series-column-unique '))).to.be.true
      // Validate rows
      const series = await queryInterface.sequelize.query('SELECT "id", "name", "libraryId" FROM Series', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(series).to.have.length(2)
      expect(series).to.deep.include({ id: series1Id, name: 'Series 1', libraryId: library1Id })
      expect(series).to.deep.include({ id: series2Id, name: 'Series 1', libraryId: library2Id })
      const bookSeries = await queryInterface.sequelize.query('SELECT "id", "bookId", "seriesId" FROM BookSeries', { type: queryInterface.sequelize.QueryTypes.SELECT })
      expect(bookSeries).to.have.length(2)
      expect(bookSeries).to.deep.include({ id: bookSeries1Id, bookId: book1Id, seriesId: series1Id })
      expect(bookSeries).to.deep.include({ id: bookSeries2Id, bookId: book2Id, seriesId: series2Id })
    })
  })

  describe('down', () => {
    beforeEach(async () => {
      await queryInterface.createTable('Series', {
        id: { type: Sequelize.UUID, primaryKey: true },
        name: { type: Sequelize.STRING, allowNull: false },
        libraryId: { type: Sequelize.UUID, allowNull: false },
        createdAt: { type: Sequelize.DATE, allowNull: false },
        updatedAt: { type: Sequelize.DATE, allowNull: false }
      })
      await queryInterface.createTable('BookSeries', {
        id: { type: Sequelize.UUID, primaryKey: true },
        bookId: { type: Sequelize.UUID, allowNull: false },
        seriesId: { type: Sequelize.UUID, allowNull: false }
      })
    })
    it('should not have unique constraint on series name and libraryId', async () => {
      await up({ context: { queryInterface, logger: Logger } })
      await down({ context: { queryInterface, logger: Logger } })

      expect(loggerInfoStub.callCount).to.equal(8)
      expect(loggerInfoStub.getCall(0).calledWith(sinon.match('UPGRADE BEGIN: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(1).calledWith(sinon.match('[2.13.5 migration] Found 0 duplicate series'))).to.be.true
      expect(loggerInfoStub.getCall(2).calledWith(sinon.match('[2.13.5 migration] Deduplication complete'))).to.be.true
      expect(loggerInfoStub.getCall(3).calledWith(sinon.match('Added unique index on Series.name and Series.libraryId'))).to.be.true
      expect(loggerInfoStub.getCall(4).calledWith(sinon.match('UPGRADE END: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(5).calledWith(sinon.match('DOWNGRADE BEGIN: 2.13.5-series-column-unique '))).to.be.true
      expect(loggerInfoStub.getCall(6).calledWith(sinon.match('Removed unique index on Series.name and Series.libraryId'))).to.be.true
      expect(loggerInfoStub.getCall(7).calledWith(sinon.match('DOWNGRADE END: 2.13.5-series-column-unique '))).to.be.true
      // Ensure index does not exist
      const indexes = await queryInterface.showIndex('Series')
      expect(indexes).to.not.deep.include({ tableName: 'Series', unique: true, fields: ['name', 'libraryId'], name: 'unique_series_name_per_library' })
    })
  })
})
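One gap worth noting: the `up` tests assert row contents and log output but never check that the unique index itself was created; only the `down` test calls `showIndex`. A minimal sketch of such an assertion, written as an extra test case that would sit inside the `describe('up', ...)` block above (hypothetical addition, not part of this commit):

it('up() creates the unique index on Series.name and Series.libraryId', async () => {
  await up({ context: { queryInterface, logger: Logger } })

  // showIndex returns one entry per index on the table, including the named unique index
  const indexes = await queryInterface.showIndex('Series')
  const uniqueIndex = indexes.find((index) => index.name === 'unique_series_name_per_library')
  expect(uniqueIndex).to.not.be.undefined
  expect(uniqueIndex.unique).to.be.true
})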