feat(up): handle storage initialization errors and present missing loader errors in a better way
parent c1d55978d7
commit a8db22680e
4 changed files with 169 additions and 55 deletions
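In short: instead of rejecting with a thrown error, the up command now reports these failures through the reporter and resolves with a non-zero exit code. A rough sketch of what callers can expect (illustrative only, using a hypothetical `options` object; the real call sites are in the diff below):

    // Storage that cannot be initialized no longer makes upCommand() reject.
    const exitCode = await upCommand(options);

    // exitCode === 1, and the reporter's onFinished hook received a
    // StorageInitError whose `cause` is the underlying failure, or a
    // BadOptionError when a loader plugin is missing.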
@@ -17,7 +17,7 @@ type Mocked<T> = {
 describe('up', () => {
   it('returns 0 and finishes without an error when there are no migrations to run', async () => {
-    const { reporter, run } = getUpCommand([], []);
+    const { reporter, run } = getUpCommand([], getStorage([]));

     const exitCode = await run();

@@ -41,55 +41,59 @@ describe('up', () => {
     assert.deepStrictEqual(reporter.onFinished.mock.calls[0]?.arguments, [[], undefined]);
   });

-  it('throws when there are migration file extensions without a corresponding loader plugin', async () => {
-    const { reporter, run } = getUpCommand(['some_file.sql'], []);
+  it('returns 1 and finishes with an error when there are migration file extensions without a corresponding loader plugin', async () => {
+    const { reporter, run } = getUpCommand(['some_other.js', 'some_file.sql'], getStorage([]));

-    await assert.rejects(
-      async () => {
-        return run();
-      },
-      {
-        name: 'Error [ERR_BAD_OPT]',
-        message: 'No loader plugin found for file extension: .sql',
-      },
-    );
+    const exitCode = await run();

-    assert.strictEqual(reporter.onInit.mock.calls.length, 0);
+    assert.strictEqual(exitCode, 1);
+    assert.strictEqual(reporter.onInit.mock.calls.length, 1);
     assert.strictEqual(reporter.onCollectedMigrations.mock.calls.length, 0);
     assert.strictEqual(reporter.onLockedMigrations.mock.calls.length, 0);
     assert.strictEqual(reporter.onMigrationStart.mock.calls.length, 0);
     assert.strictEqual(reporter.onMigrationSuccess.mock.calls.length, 0);
-    assert.strictEqual(reporter.onMigrationError.mock.calls.length, 0);
-    assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0);
-    assert.strictEqual(reporter.onFinished.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationError.mock.calls.length, 1);
+    assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 1);
+    const args = reporter.onFinished.mock.calls[0]?.arguments;
+    assert.strictEqual(args?.length, 2);
+    const entries = args[0];
+    const error = args[1];
+    assert.strictEqual(entries.length, 2);
+    assert.deepStrictEqual(
+      entries.map((entry) => `${entry.name} (${entry.status})`),
+      ['some_other.js (skipped)', 'some_file.sql (failed)'],
+    );
+    assert.strictEqual(error?.message, 'No loader plugin found for file extension: .sql');
   });

-  it('throws when there are migration file extensions without a corresponding loader plugin in dry-run mode as well', async () => {
-    const { reporter, run } = getUpCommand(['some_file.sql'], []);
+  it('returns 1 and finishes with an error when there are migration file extensions without a corresponding loader plugin in dry-run mode as well', async () => {
+    const { reporter, run } = getUpCommand(['some_other.js', 'some_file.sql'], getStorage([]));

-    await assert.rejects(
-      async () => {
-        return run(true);
-      },
-      {
-        name: 'Error [ERR_BAD_OPT]',
-        message: 'No loader plugin found for file extension: .sql',
-      },
-    );
+    const exitCode = await run();

-    assert.strictEqual(reporter.onInit.mock.calls.length, 0);
+    assert.strictEqual(exitCode, 1);
+    assert.strictEqual(reporter.onInit.mock.calls.length, 1);
     assert.strictEqual(reporter.onCollectedMigrations.mock.calls.length, 0);
     assert.strictEqual(reporter.onLockedMigrations.mock.calls.length, 0);
     assert.strictEqual(reporter.onMigrationStart.mock.calls.length, 0);
     assert.strictEqual(reporter.onMigrationSuccess.mock.calls.length, 0);
-    assert.strictEqual(reporter.onMigrationError.mock.calls.length, 0);
-    assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0);
-    assert.strictEqual(reporter.onFinished.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationError.mock.calls.length, 1);
+    assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 1);
+    const args = reporter.onFinished.mock.calls[0]?.arguments;
+    assert.strictEqual(args?.length, 2);
+    const entries = args[0];
+    const error = args[1];
+    assert.strictEqual(entries.length, 2);
+    assert.deepStrictEqual(
+      entries.map((entry) => `${entry.name} (${entry.status})`),
+      ['some_other.js (skipped)', 'some_file.sql (failed)'],
+    );
+    assert.strictEqual(error?.message, 'No loader plugin found for file extension: .sql');
   });

   it('returns 1 and finishes with an error when there are failed migrations in the history', async () => {
     const failedEntry = toEntry('some_failed_migration.js', 'failed');
-    const { reporter, run } = getUpCommand([failedEntry.name], [failedEntry]);
+    const { reporter, run } = getUpCommand([failedEntry.name], getStorage([failedEntry]));

     const exitCode = await run();

@@ -120,8 +124,48 @@ describe('up', () => {
     assert.strictEqual(finishedEntry.error, error);
     assert.strictEqual(error?.cause, failedEntry.error);
   });
+
+  it("returns 1 and finishes with an error when the storage couldn't be initialized", async () => {
+    const { reporter, run } = getUpCommand(['some_migration.js']);
+
+    const exitCode = await run();
+
+    assert.strictEqual(exitCode, 1);
+    assert.strictEqual(reporter.onInit.mock.calls.length, 1);
+    assert.deepStrictEqual(reporter.onInit.mock.calls[0]?.arguments, [
+      {
+        command: 'up',
+        cwd: '/emigrate',
+        dry: false,
+        directory: 'migrations',
+      },
+    ]);
+    assert.strictEqual(reporter.onCollectedMigrations.mock.calls.length, 0);
+    assert.strictEqual(reporter.onLockedMigrations.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationStart.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationSuccess.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationError.mock.calls.length, 0);
+    assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0);
+    assert.strictEqual(reporter.onFinished.mock.calls.length, 1);
+    const args = reporter.onFinished.mock.calls[0]?.arguments;
+    assert.strictEqual(args?.length, 2);
+    const entries = args[0];
+    const error = args[1];
+    const cause = getErrorCause(error);
+    assert.deepStrictEqual(entries, []);
+    assert.strictEqual(error?.message, 'Could not initialize storage');
+    assert.strictEqual(cause?.message, 'No storage configured');
+  });
 });
+
+function getErrorCause(error: Error | undefined): Error | undefined {
+  if (error?.cause instanceof Error) {
+    return error.cause;
+  }
+
+  return undefined;
+}

 function toMigration(cwd: string, directory: string, name: string): MigrationMetadata {
   return {
     name,
@@ -164,11 +208,22 @@ async function noop() {
   // noop
 }

-function getUpCommand(
-  migrationFiles: string[],
-  historyEntries: Array<string | MigrationHistoryEntry>,
-  plugins?: Plugin[],
-) {
+function getStorage(historyEntries: Array<string | MigrationHistoryEntry>) {
+  const storage: Mocked<Storage> = {
+    lock: mock.fn(),
+    unlock: mock.fn(),
+    getHistory: mock.fn(async function* () {
+      yield* toEntries(historyEntries);
+    }),
+    remove: mock.fn(),
+    onSuccess: mock.fn(),
+    onError: mock.fn(),
+  };
+
+  return storage;
+}
+
+function getUpCommand(migrationFiles: string[], storage?: Mocked<Storage>, plugins?: Plugin[]) {
   const reporter: Mocked<Required<EmigrateReporter>> = {
     onFinished: mock.fn(noop),
     onInit: mock.fn(noop),
@@ -184,23 +239,16 @@ function getUpCommand(
     onMigrationSkip: mock.fn(noop),
   };

-  const storage: Mocked<Storage> = {
-    lock: mock.fn(),
-    unlock: mock.fn(),
-    getHistory: mock.fn(async function* () {
-      yield* toEntries(historyEntries);
-    }),
-    remove: mock.fn(),
-    onSuccess: mock.fn(),
-    onError: mock.fn(),
-  };
-
   const run = async (dry = false) => {
     return upCommand({
       cwd: '/emigrate',
       directory: 'migrations',
       storage: {
         async initializeStorage() {
+          if (!storage) {
+            throw new Error('No storage configured');
+          }
+
           return storage;
         },
       },

@@ -14,6 +14,7 @@ import {
   MigrationLoadError,
   MigrationRunError,
   MissingOptionError,
+  StorageInitError,
 } from '../errors.js';
 import { type Config } from '../types.js';
 import { withLeadingPeriod } from '../with-leading-period.js';
@@ -29,6 +30,29 @@ type ExtraFlags = {
 const lazyDefaultReporter = async () => import('../reporters/default.js');
 const lazyPluginLoaderJs = async () => import('../plugin-loader-js.js');

+const toError = (error: unknown) => (error instanceof Error ? error : new Error(String(error)));
+
+type Fn<Args extends any[], Result> = (...args: Args) => Result;
+type Result<T> = [value: T, error: undefined] | [value: undefined, error: Error];
+
+/**
+ * Execute a function and return a result tuple
+ *
+ * This is a helper function to make it easier to handle errors without the extra nesting of try/catch
+ */
+const exec = async <Args extends any[], Return extends Promise<any>>(
+  fn: Fn<Args, Return>,
+  ...args: Args
+): Promise<Result<Awaited<Return>>> => {
+  try {
+    const result = await fn(...args);
+
+    return [result, undefined];
+  } catch (error) {
+    return [undefined, toError(error)];
+  }
+};
+
 export default async function upCommand({
   storage: storageConfig,
   reporter: reporterConfig,
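The exec() helper above returns a [value, error] tuple so call sites can branch on the error without nesting try/catch. A minimal sketch of the call-site pattern (illustrative; loadSomething is a made-up stand-in for any async function):

    const [value, error] = await exec(async () => loadSomething());

    if (error) {
      // handle or report the failure, e.g. wrap it and bail out early
      return 1;
    }

    // here `value` is defined and `error` is undefined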
@@ -48,7 +72,6 @@ export default async function upCommand({
     throw new BadOptionError('storage', 'No storage found, please specify a storage using the storage option');
   }

-  const storage = await storagePlugin.initializeStorage();
   const reporter = await getOrLoadReporter([reporterConfig ?? lazyDefaultReporter]);

   if (!reporter) {
@@ -58,6 +81,16 @@ export default async function upCommand({
     );
   }

+  await reporter.onInit?.({ command: 'up', cwd, dry, directory });
+
+  const [storage, storageError] = await exec(async () => storagePlugin.initializeStorage());
+
+  if (storageError) {
+    await reporter.onFinished?.([], new StorageInitError('Could not initialize storage', { cause: storageError }));
+
+    return 1;
+  }
+
   const migrationFiles = await getMigrations(cwd, directory);
   const failedEntries: MigrationMetadataFinished[] = [];

@@ -106,14 +139,36 @@ export default async function upCommand({
     ),
   );

-  for (const [extension, loader] of loaderByExtension) {
+  for await (const [extension, loader] of loaderByExtension) {
     if (!loader) {
-      throw new BadOptionError('plugin', `No loader plugin found for file extension: ${extension}`);
+      const finishedMigrations: MigrationMetadataFinished[] = [...failedEntries];
+
+      for await (const failedEntry of failedEntries) {
+        await reporter.onMigrationError?.(failedEntry, failedEntry.error!);
+      }
+
+      for await (const migration of migrationFiles) {
+        if (migration.extension === extension) {
+          const error = new BadOptionError('plugin', `No loader plugin found for file extension: ${extension}`);
+          const finishedMigration: MigrationMetadataFinished = { ...migration, duration: 0, status: 'failed', error };
+          await reporter.onMigrationError?.(finishedMigration, error);
+          finishedMigrations.push(finishedMigration);
+        } else {
+          const finishedMigration: MigrationMetadataFinished = { ...migration, duration: 0, status: 'skipped' };
+          await reporter.onMigrationSkip?.(finishedMigration);
+          finishedMigrations.push(finishedMigration);
+        }
+      }
+
+      await reporter.onFinished?.(
+        finishedMigrations,
+        new BadOptionError('plugin', `No loader plugin found for file extension: ${extension}`),
+      );
+
+      return 1;
     }
   }

-  await reporter.onInit?.({ command: 'up', cwd, dry, directory });
-
   await reporter.onCollectedMigrations?.([...failedEntries, ...migrationFiles]);

   if (migrationFiles.length === 0 || dry || failedEntries.length > 0) {
@@ -150,7 +205,7 @@ export default async function upCommand({
       await reporter.onMigrationSkip?.({ ...migration, duration: 0, status: 'skipped' });
     }

-    await reporter.onFinished?.([], error instanceof Error ? error : new Error(String(error)));
+    await reporter.onFinished?.([], toError(error));

     return 1;
   }
@@ -218,7 +273,7 @@ export default async function upCommand({

       finishedMigrations.push(finishedMigration);
     } catch (error) {
-      const errorInstance = error instanceof Error ? error : new Error(String(error));
+      const errorInstance = toError(error);
       const serializedError = serializeError(errorInstance);
       const duration = getDuration(start);
       const finishedMigration: MigrationMetadataFinished = {

@@ -89,3 +89,9 @@ export class MigrationNotRunError extends EmigrateError
     super('ERR_MIGRATION_NOT_RUN', message, options);
   }
 }
+
+export class StorageInitError extends EmigrateError {
+  constructor(message: string, options?: ErrorOptions) {
+    super('ERR_STORAGE_INIT', message, options);
+  }
+}
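As the tests above exercise, the new error type is meant to wrap the original failure via the standard ErrorOptions cause, roughly like this (illustrative sketch, not part of the diff):

    const wrapped = new StorageInitError('Could not initialize storage', { cause: originalError });

    wrapped.message; // 'Could not initialize storage'
    wrapped.cause;   // originalError, e.g. new Error('No storage configured')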