Compare commits


No commits in common. "52844d7a091c0f76449f73e50f1b23bc6eb7114c" and "4d124025951a1594f63c5bd33e28c0a3efb42c42" have entirely different histories.

54 changed files with 592 additions and 2010 deletions


@ -13,7 +13,6 @@ jobs:
env:
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
DO_NOT_TRACK: 1
steps:
- name: Check out code
@ -26,7 +25,7 @@ jobs:
- name: Setup Node.js environment
uses: actions/setup-node@v4
with:
node-version: 22.15.0
node-version: 20.9.0
cache: 'pnpm'
- name: Install dependencies


@ -33,7 +33,6 @@ jobs:
uses: withastro/action@v2
with:
path: ./docs # The root location of your Astro project inside the repository. (optional)
package-manager: pnpm@9.4.0 # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. (optional)
deploy:
needs: build


@ -1,62 +0,0 @@
name: Integration Tests
on:
push:
branches: ['main', 'changeset-release/main']
pull_request:
jobs:
mysql_integration:
name: Emigrate MySQL integration tests
timeout-minutes: 15
runs-on: ubuntu-latest
env:
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
DO_NOT_TRACK: 1
services:
mysql:
image: mysql:8.0
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: emigrate
MYSQL_USER: emigrate
MYSQL_PASSWORD: emigrate
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping -h localhost" --health-interval=10s --health-timeout=5s --health-retries=5
steps:
- name: Check out code
uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: pnpm/action-setup@v4.0.0
- name: Setup Node.js environment
uses: actions/setup-node@v4
with:
node-version: 22.15.0
cache: 'pnpm'
- name: Install dependencies
run: pnpm install
- name: Wait for MySQL to be ready
run: |
for i in {1..30}; do
nc -z localhost 3306 && echo "MySQL is up!" && break
echo "Waiting for MySQL..."
sleep 2
done
- name: Build package
run: pnpm build --filter @emigrate/mysql
- name: Integration Tests
env:
MYSQL_HOST: '127.0.0.1'
MYSQL_PORT: 3306
run: pnpm --filter @emigrate/mysql integration


@ -30,43 +30,18 @@ jobs:
- name: Setup Node.js environment
uses: actions/setup-node@v4
with:
node-version: 22.15.0
node-version: 20.9.0
cache: 'pnpm'
- name: Install Dependencies
run: pnpm install
- name: Create Release Pull Request
id: changesets
uses: aboviq/changesets-action@v1.5.2
uses: changesets/action@v1.4.7
with:
publish: pnpm run release
commit: 'chore(release): version packages'
title: 'chore(release): version packages'
createGithubReleases: aggregate
env:
GITHUB_TOKEN: ${{ secrets.PAT_GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Release to @next tag on npm
if: github.ref_name == 'main' && steps.changesets.outputs.published != 'true'
run: |
git checkout main
CHANGESET_FILE=$(git diff-tree --no-commit-id --name-only HEAD -r ".changeset/*-*-*.md")
if [ -z "$CHANGESET_FILE" ]; then
echo "No changesets found, skipping release to @next tag"
exit 0
fi
AFFECTED_PACKAGES=$(sed -n '/---/,/---/p' "$CHANGESET_FILE" | sed '/---/d')
if [ -z "$AFFECTED_PACKAGES" ]; then
echo "No packages affected by changesets, skipping release to @next tag"
exit 0
fi
pnpm changeset version --snapshot next
pnpm changeset publish --tag next
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ secrets.PAT_GITHUB_TOKEN }}


@ -11,7 +11,6 @@
"astro": "astro"
},
"dependencies": {
"@astrojs/check": "^0.7.0",
"@astrojs/starlight": "^0.15.0",
"@astrojs/starlight-tailwind": "2.0.1",
"@astrojs/tailwind": "^5.0.3",
@ -21,6 +20,5 @@
},
"volta": {
"extends": "../package.json"
},
"packageManager": "pnpm@9.4.0"
}
}


@ -37,7 +37,7 @@
"bugs": "https://github.com/aboviq/emigrate/issues",
"license": "MIT",
"volta": {
"node": "22.15.0",
"node": "20.9.0",
"pnpm": "9.4.0"
},
"packageManager": "pnpm@9.4.0",
@ -62,10 +62,7 @@
},
"overrides": [
{
"files": [
"packages/**/*.test.ts",
"packages/**/*.integration.ts"
],
"files": "packages/**/*.test.ts",
"rules": {
"@typescript-eslint/no-floating-promises": 0,
"max-params": 0
@ -83,10 +80,9 @@
"lint-staged": "15.2.0",
"npm-run-all": "4.1.5",
"prettier": "3.1.1",
"testcontainers": "10.24.2",
"tsx": "4.15.7",
"turbo": "2.0.5",
"typescript": "5.5.2",
"tsx": "4.7.0",
"turbo": "1.10.16",
"typescript": "5.3.3",
"xo": "0.56.0"
}
}


@ -1,14 +1,5 @@
# @emigrate/cli
## 0.18.4
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.18.3
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/cli",
"version": "0.18.4",
"version": "0.18.3",
"publishConfig": {
"access": "public",
"provenance": true


@ -1,12 +1,12 @@
import { type MigrationHistoryEntry, type MigrationMetadata, type MigrationMetadataFinished } from '@emigrate/types';
import { toMigrationMetadata } from './to-migration-metadata.js';
import { getMigrations as getMigrationsOriginal, type GetMigrationsFunction } from './get-migrations.js';
import { getMigrations as getMigrationsOriginal } from './get-migrations.js';
export async function* collectMigrations(
cwd: string,
directory: string,
history: AsyncIterable<MigrationHistoryEntry>,
getMigrations: GetMigrationsFunction = getMigrationsOriginal,
getMigrations = getMigrationsOriginal,
): AsyncIterable<MigrationMetadata | MigrationMetadataFinished> {
const allMigrations = await getMigrations(cwd, directory);
const seen = new Set<string>();


@ -17,7 +17,7 @@ export default async function listCommand({
storage: storageConfig,
color,
cwd,
}: Config & ExtraFlags): Promise<number> {
}: Config & ExtraFlags) {
if (!directory) {
throw MissingOptionError.fromOption('directory');
}


@ -24,7 +24,7 @@ type ExtraFlags = {
export default async function newCommand(
{ directory, template, reporter: reporterConfig, plugins = [], cwd, extension, color }: Config & ExtraFlags,
name: string,
): Promise<void> {
) {
if (!directory) {
throw MissingOptionError.fromOption('directory');
}


@ -11,7 +11,6 @@ import {
StorageInitError,
} from '../errors.js';
import {
assertErrorEqualEnough,
getErrorCause,
getMockedReporter,
getMockedStorage,
@ -200,11 +199,6 @@ function assertPreconditionsFailed(reporter: Mocked<Required<EmigrateReporter>>,
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
// hackety hack:
if (finishedError) {
finishedError.stack = error?.stack;
}
assert.deepStrictEqual(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
@ -294,7 +288,14 @@ function assertPreconditionsFulfilled(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assertErrorEqualEnough(error, finishedError, 'Finished error');
assert.deepStrictEqual(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, expected.length, 'Finished entries length');
assert.deepStrictEqual(
entries.map((entry) => `${entry.name} (${entry.status})`),


@ -39,7 +39,7 @@ export default async function removeCommand(
getMigrations,
}: Config & ExtraFlags,
name: string,
): Promise<number> {
) {
if (!directory) {
throw MissingOptionError.fromOption('directory');
}


@ -1,6 +1,13 @@
import { describe, it, mock } from 'node:test';
import assert from 'node:assert';
import { type EmigrateReporter, type Storage, type Plugin, type MigrationMetadataFinished } from '@emigrate/types';
import {
type EmigrateReporter,
type Storage,
type Plugin,
type SerializedError,
type MigrationMetadataFinished,
} from '@emigrate/types';
import { deserializeError } from 'serialize-error';
import { version } from '../get-package-info.js';
import {
BadOptionError,
@ -9,6 +16,7 @@ import {
MigrationHistoryError,
MigrationRunError,
StorageInitError,
toSerializedError,
} from '../errors.js';
import {
type Mocked,
@ -16,7 +24,7 @@ import {
toMigrations,
getMockedReporter,
getMockedStorage,
assertErrorEqualEnough,
getErrorCause,
} from '../test-utils.js';
import upCommand from './up.js';
@ -922,13 +930,15 @@ function assertPreconditionsFulfilled(
for (const [index, entry] of failedEntries.entries()) {
if (entry.status === 'failed') {
const error = reporter.onMigrationError.mock.calls[index]?.arguments[1];
assertErrorEqualEnough(error, entry.error, 'Error');
assert.deepStrictEqual(error, entry.error, 'Error');
const cause = entry.error?.cause;
assert.deepStrictEqual(error?.cause, cause ? deserializeError(cause) : cause, 'Error cause');
if (entry.started) {
const [finishedMigration, error] = storage.onError.mock.calls[index]?.arguments ?? [];
assert.strictEqual(finishedMigration?.name, entry.name);
assert.strictEqual(finishedMigration?.status, entry.status);
assertErrorEqualEnough(error, entry.error, `Entry error (${entry.name})`);
assertErrorEqualEnough(error, entry.error);
}
}
}
@ -936,7 +946,15 @@ function assertPreconditionsFulfilled(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, pending + skipped, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assertErrorEqualEnough(error, finishedError, 'Finished error');
assertErrorEqualEnough(error, finishedError);
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, expected.length, 'Finished entries length');
assert.deepStrictEqual(
entries.map((entry) => `${entry.name} (${entry.status})`),
@ -977,6 +995,33 @@ function assertPreconditionsFailed(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assertErrorEqualEnough(error, finishedError, 'Finished error');
assert.deepStrictEqual(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, 0, 'Finished entries length');
}
function assertErrorEqualEnough(actual?: Error | SerializedError, expected?: Error) {
if (expected === undefined) {
assert.strictEqual(actual, undefined);
return;
}
const {
cause: actualCause,
stack: actualStack,
...actualError
} = actual instanceof Error ? toSerializedError(actual) : actual ?? {};
const { cause: expectedCause, stack: expectedStack, ...expectedError } = toSerializedError(expected);
// @ts-expect-error Ignore
const { stack: actualCauseStack, ...actualCauseRest } = actualCause ?? {};
// @ts-expect-error Ignore
const { stack: expectedCauseStack, ...expectedCauseRest } = expectedCause ?? {};
assert.deepStrictEqual(actualError, expectedError);
assert.deepStrictEqual(actualCauseRest, expectedCauseRest);
}


@ -8,7 +8,7 @@ import { serializeError, errorConstructors, deserializeError } from 'serialize-e
const formatter = new Intl.ListFormat('en', { style: 'long', type: 'disjunction' });
export const toError = (error: unknown): Error => (error instanceof Error ? error : new Error(String(error)));
export const toError = (error: unknown) => (error instanceof Error ? error : new Error(String(error)));
export const toSerializedError = (error: unknown) => {
const errorInstance = toError(error);
@ -30,7 +30,7 @@ export class EmigrateError extends Error {
export class ShowUsageError extends EmigrateError {}
export class MissingOptionError extends ShowUsageError {
static fromOption(option: string | string[]): MissingOptionError {
static fromOption(option: string | string[]) {
return new MissingOptionError(
`Missing required option: ${Array.isArray(option) ? formatter.format(option) : option}`,
undefined,
@ -48,7 +48,7 @@ export class MissingOptionError extends ShowUsageError {
}
export class MissingArgumentsError extends ShowUsageError {
static fromArgument(argument: string): MissingArgumentsError {
static fromArgument(argument: string) {
return new MissingArgumentsError(`Missing required argument: ${argument}`, undefined, argument);
}
@ -62,7 +62,7 @@ export class MissingArgumentsError extends ShowUsageError {
}
export class OptionNeededError extends ShowUsageError {
static fromOption(option: string, message: string): OptionNeededError {
static fromOption(option: string, message: string) {
return new OptionNeededError(message, undefined, option);
}
@ -76,7 +76,7 @@ export class OptionNeededError extends ShowUsageError {
}
export class BadOptionError extends ShowUsageError {
static fromOption(option: string, message: string): BadOptionError {
static fromOption(option: string, message: string) {
return new BadOptionError(message, undefined, option);
}
@ -96,7 +96,7 @@ export class UnexpectedError extends EmigrateError {
}
export class MigrationHistoryError extends EmigrateError {
static fromHistoryEntry(entry: FailedMigrationHistoryEntry): MigrationHistoryError {
static fromHistoryEntry(entry: FailedMigrationHistoryEntry) {
return new MigrationHistoryError(`Migration ${entry.name} is in a failed state, it should be fixed and removed`, {
cause: deserializeError(entry.error),
});
@ -108,7 +108,7 @@ export class MigrationHistoryError extends EmigrateError {
}
export class MigrationLoadError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationLoadError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) {
return new MigrationLoadError(`Failed to load migration file: ${metadata.relativeFilePath}`, { cause });
}
@ -118,7 +118,7 @@ export class MigrationLoadError extends EmigrateError {
}
export class MigrationRunError extends EmigrateError {
static fromMetadata(metadata: FailedMigrationMetadata): MigrationRunError {
static fromMetadata(metadata: FailedMigrationMetadata) {
return new MigrationRunError(`Failed to run migration: ${metadata.relativeFilePath}`, { cause: metadata.error });
}
@ -128,7 +128,7 @@ export class MigrationRunError extends EmigrateError {
}
export class MigrationNotRunError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationNotRunError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) {
return new MigrationNotRunError(`Migration "${metadata.name}" is not in the migration history`, { cause });
}
@ -138,7 +138,7 @@ export class MigrationNotRunError extends EmigrateError {
}
export class MigrationRemovalError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationRemovalError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) {
return new MigrationRemovalError(`Failed to remove migration: ${metadata.relativeFilePath}`, { cause });
}
@ -148,7 +148,7 @@ export class MigrationRemovalError extends EmigrateError {
}
export class StorageInitError extends EmigrateError {
static fromError(error: Error): StorageInitError {
static fromError(error: Error) {
return new StorageInitError('Could not initialize storage', { cause: error });
}
@ -158,11 +158,11 @@ export class StorageInitError extends EmigrateError {
}
export class CommandAbortError extends EmigrateError {
static fromSignal(signal: NodeJS.Signals): CommandAbortError {
static fromSignal(signal: NodeJS.Signals) {
return new CommandAbortError(`Command aborted due to signal: ${signal}`);
}
static fromReason(reason: string, cause?: unknown): CommandAbortError {
static fromReason(reason: string, cause?: unknown) {
return new CommandAbortError(`Command aborted: ${reason}`, { cause });
}
@ -172,7 +172,7 @@ export class CommandAbortError extends EmigrateError {
}
export class ExecutionDesertedError extends EmigrateError {
static fromReason(reason: string, cause?: Error): ExecutionDesertedError {
static fromReason(reason: string, cause?: Error) {
return new ExecutionDesertedError(`Execution deserted: ${reason}`, { cause });
}


@ -1,6 +1,6 @@
import process from 'node:process';
export const getDuration = (start: [number, number]): number => {
export const getDuration = (start: [number, number]) => {
const [seconds, nanoseconds] = process.hrtime(start);
return seconds * 1000 + nanoseconds / 1_000_000;
};


@ -39,6 +39,6 @@ export const getMigrations = async (cwd: string, directory: string): Promise<Mig
extension: withLeadingPeriod(path.extname(name)),
directory,
cwd,
};
} satisfies MigrationMetadata;
});
};


@ -28,7 +28,4 @@ const getPackageInfo = async () => {
throw new UnexpectedError(`Could not read package info from: ${packageInfoPath}`);
};
const packageInfo = await getPackageInfo();
// eslint-disable-next-line prefer-destructuring
export const version: string = packageInfo.version;
export const { version } = await getPackageInfo();


@ -1,5 +1,5 @@
export * from './types.js';
export const emigrate = (): void => {
export const emigrate = () => {
// console.log('Done!');
};


@ -471,6 +471,6 @@ class DefaultReporter implements Required<EmigrateReporter> {
}
}
const reporterDefault: EmigrateReporter = interactive ? new DefaultFancyReporter() : new DefaultReporter();
const reporterDefault = interactive ? new DefaultFancyReporter() : new DefaultReporter();
export default reporterDefault;


@ -1,8 +1,7 @@
import type { EmigrateReporter } from '@emigrate/types';
import { type Config } from '../types.js';
import * as reporters from './index.js';
export const getStandardReporter = (reporter?: Config['reporter']): EmigrateReporter | undefined => {
export const getStandardReporter = (reporter?: Config['reporter']) => {
if (!reporter) {
return reporters.pretty;
}
@ -11,5 +10,5 @@ export const getStandardReporter = (reporter?: Config['reporter']): EmigrateRepo
return reporters[reporter as keyof typeof reporters];
}
return undefined;
return; // eslint-disable-line no-useless-return
};


@ -55,6 +55,6 @@ class JsonReporter implements EmigrateReporter {
}
}
const jsonReporter: EmigrateReporter = new JsonReporter();
const jsonReporter = new JsonReporter() as EmigrateReporter;
export default jsonReporter;


@ -1,6 +1,5 @@
import { mock, type Mock } from 'node:test';
import path from 'node:path';
import assert from 'node:assert';
import {
type SerializedError,
type EmigrateReporter,
@ -10,14 +9,13 @@ import {
type NonFailedMigrationHistoryEntry,
type Storage,
} from '@emigrate/types';
import { toSerializedError } from './errors.js';
export type Mocked<T> = {
// @ts-expect-error - This is a mock
[K in keyof T]: Mock<T[K]>;
};
export async function noop(): Promise<void> {
export async function noop() {
// noop
}
@ -33,8 +31,8 @@ export function getErrorCause(error: Error | undefined): Error | SerializedError
return undefined;
}
export function getMockedStorage(historyEntries: Array<string | MigrationHistoryEntry>): Mocked<Storage> {
return {
export function getMockedStorage(historyEntries: Array<string | MigrationHistoryEntry>) {
const storage: Mocked<Storage> = {
lock: mock.fn(async (migrations) => migrations),
unlock: mock.fn(async () => {
// void
@ -47,6 +45,8 @@ export function getMockedStorage(historyEntries: Array<string | MigrationHistory
onError: mock.fn(),
end: mock.fn(),
};
return storage;
}
export function getMockedReporter(): Mocked<Required<EmigrateReporter>> {
@ -112,23 +112,3 @@ export function toEntries(
): MigrationHistoryEntry[] {
return names.map((name) => (typeof name === 'string' ? toEntry(name, status) : name));
}
export function assertErrorEqualEnough(actual?: Error | SerializedError, expected?: Error, message?: string): void {
if (expected === undefined) {
assert.strictEqual(actual, undefined);
return;
}
const {
cause: actualCause,
stack: actualStack,
...actualError
} = actual instanceof Error ? toSerializedError(actual) : actual ?? {};
const { cause: expectedCause, stack: expectedStack, ...expectedError } = toSerializedError(expected);
// @ts-expect-error Ignore
const { stack: actualCauseStack, ...actualCauseRest } = actualCause ?? {};
// @ts-expect-error Ignore
const { stack: expectedCauseStack, ...expectedCauseRest } = expectedCause ?? {};
assert.deepStrictEqual(actualError, expectedError, message);
assert.deepStrictEqual(actualCauseRest, expectedCauseRest, message ? `${message} (cause)` : undefined);
}


@ -1 +1 @@
export const withLeadingPeriod = (string: string): string => (string.startsWith('.') ? string : `.${string}`);
export const withLeadingPeriod = (string: string) => (string.startsWith('.') ? string : `.${string}`);


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,16 +1,5 @@
# @emigrate/mysql
## 0.3.3
### Patch Changes
- 26240f4: Make sure we can initialize multiple running instances of Emigrate using @emigrate/mysql concurrently without issues with creating the history table (for instance in a Kubernetes environment and/or with a Percona cluster).
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- 26240f4: Either lock all or none of the migrations to run to make sure they run in order when multiple instances of Emigrate runs concurrently (for instance in a Kubernetes environment)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.2
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/mysql",
"version": "0.3.3",
"version": "0.3.2",
"publishConfig": {
"access": "public",
"provenance": true
@ -17,16 +17,12 @@
},
"files": [
"dist",
"!dist/*.tsbuildinfo",
"!dist/**/*.test.js",
"!dist/tests/*"
"!dist/*.tsbuildinfo"
],
"scripts": {
"build": "tsc --pretty",
"build:watch": "tsc --pretty --watch",
"lint": "xo --cwd=../.. $(pwd)",
"integration": "glob -c \"node --import tsx --test-reporter spec --test\" \"./src/**/*.integration.ts\"",
"integration:watch": "glob -c \"node --watch --import tsx --test-reporter spec --test\" \"./src/**/*.integration.ts\""
"lint": "xo --cwd=../.. $(pwd)"
},
"keywords": [
"emigrate",


@ -1,103 +0,0 @@
import assert from 'node:assert';
import path from 'node:path';
import { before, after, describe, it } from 'node:test';
import type { MigrationMetadata } from '@emigrate/types';
import { startDatabase, stopDatabase } from './tests/database.js';
import { createMysqlStorage } from './index.js';
let db: { port: number; host: string };
const toEnd = new Set<{ end: () => Promise<void> }>();
describe('emigrate-mysql', async () => {
before(
async () => {
db = await startDatabase();
},
{ timeout: 60_000 },
);
after(
async () => {
for (const storage of toEnd) {
// eslint-disable-next-line no-await-in-loop
await storage.end();
}
toEnd.clear();
await stopDatabase();
},
{ timeout: 10_000 },
);
describe('migration locks', async () => {
it('either locks none or all of the given migrations', async () => {
const { initializeStorage } = createMysqlStorage({
table: 'migrations',
connection: {
host: db.host,
user: 'emigrate',
password: 'emigrate',
database: 'emigrate',
port: db.port,
},
});
const [storage1, storage2] = await Promise.all([initializeStorage(), initializeStorage()]);
toEnd.add(storage1);
toEnd.add(storage2);
const migrations = toMigrations('/emigrate', 'migrations', [
'2023-10-01-01-test.js',
'2023-10-01-02-test.js',
'2023-10-01-03-test.js',
'2023-10-01-04-test.js',
'2023-10-01-05-test.js',
'2023-10-01-06-test.js',
'2023-10-01-07-test.js',
'2023-10-01-08-test.js',
'2023-10-01-09-test.js',
'2023-10-01-10-test.js',
'2023-10-01-11-test.js',
'2023-10-01-12-test.js',
'2023-10-01-13-test.js',
'2023-10-01-14-test.js',
'2023-10-01-15-test.js',
'2023-10-01-16-test.js',
'2023-10-01-17-test.js',
'2023-10-01-18-test.js',
'2023-10-01-19-test.js',
'2023-10-01-20-test.js',
]);
const [locked1, locked2] = await Promise.all([storage1.lock(migrations), storage2.lock(migrations)]);
assert.strictEqual(
locked1.length === 0 || locked2.length === 0,
true,
'One of the processes should have no locks',
);
assert.strictEqual(
locked1.length === 20 || locked2.length === 20,
true,
'One of the processes should have all locks',
);
});
});
});
function toMigration(cwd: string, directory: string, name: string): MigrationMetadata {
return {
name,
filePath: `${cwd}/${directory}/${name}`,
relativeFilePath: `${directory}/${name}`,
extension: path.extname(name),
directory,
cwd,
};
}
function toMigrations(cwd: string, directory: string, names: string[]): MigrationMetadata[] {
return names.map((name) => toMigration(cwd, directory, name));
}


@ -1,6 +1,5 @@
import process from 'node:process';
import fs from 'node:fs/promises';
import { setTimeout } from 'node:timers/promises';
import {
createConnection,
createPool,
@ -14,9 +13,7 @@ import {
} from 'mysql2/promise';
import { getTimestampPrefix, sanitizeMigrationName } from '@emigrate/plugin-tools';
import {
type Awaitable,
type MigrationMetadata,
type MigrationFunction,
type EmigrateStorage,
type LoaderPlugin,
type Storage,
@ -55,7 +52,6 @@ const getConnection = async (options: ConnectionOptions | string) => {
// best to leave this at 0 (disabled)
uri.searchParams.set('connectTimeout', '0');
uri.searchParams.set('multipleStatements', 'true');
uri.searchParams.set('flags', '-FOUND_ROWS');
connection = await createConnection(uri.toString());
} else {
@ -66,7 +62,6 @@ const getConnection = async (options: ConnectionOptions | string) => {
// best to leave this at 0 (disabled)
connectTimeout: 0,
multipleStatements: true,
flags: ['-FOUND_ROWS'],
});
}
@ -87,7 +82,6 @@ const getPool = (connection: PoolOptions | string) => {
// it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled)
uri.searchParams.set('connectTimeout', '0');
uri.searchParams.set('flags', '-FOUND_ROWS');
return createPool(uri.toString());
}
@ -98,7 +92,6 @@ const getPool = (connection: PoolOptions | string) => {
// it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled)
connectTimeout: 0,
flags: ['-FOUND_ROWS'],
});
};
@ -109,8 +102,8 @@ type HistoryEntry = {
error?: SerializedError;
};
const lockMigration = async (connection: Connection, table: string, migration: MigrationMetadata) => {
const [result] = await connection.execute<ResultSetHeader>({
const lockMigration = async (pool: Pool, table: string, migration: MigrationMetadata) => {
const [result] = await pool.execute<ResultSetHeader>({
sql: `
INSERT INTO ${escapeId(table)} (name, status, date)
VALUES (?, ?, NOW())
@ -233,10 +226,8 @@ const initializeDatabase = async (config: ConnectionOptions | string) => {
}
};
const lockWaitTimeout = 10; // seconds
const isHistoryTableExisting = async (connection: Connection, table: string) => {
const [result] = await connection.execute<RowDataPacket[]>({
const initializeTable = async (pool: Pool, table: string) => {
const [result] = await pool.execute<RowDataPacket[]>({
sql: `
SELECT
1 as table_exists
@ -249,70 +240,24 @@ const isHistoryTableExisting = async (connection: Connection, table: string) =>
values: [table],
});
return result[0]?.['table_exists'] === 1;
};
const initializeTable = async (config: ConnectionOptions | string, table: string) => {
const connection = await getConnection(config);
if (await isHistoryTableExisting(connection, table)) {
await connection.end();
if (result[0]?.['table_exists']) {
return;
}
const lockName = `emigrate_init_table_lock_${table}`;
const [lockResult] = await connection.query<RowDataPacket[]>(`SELECT GET_LOCK(?, ?) AS got_lock`, [
lockName,
lockWaitTimeout,
]);
const didGetLock = lockResult[0]?.['got_lock'] === 1;
if (didGetLock) {
try {
// This table definition is compatible with the one used by the immigration-mysql package
await connection.execute(`
CREATE TABLE IF NOT EXISTS ${escapeId(table)} (
name varchar(255) not null primary key,
status varchar(32),
date datetime not null
) Engine=InnoDB;
`);
} finally {
await connection.query(`SELECT RELEASE_LOCK(?)`, [lockName]);
await connection.end();
}
return;
}
// Didn't get the lock, wait to see if the table was created by another process
const maxWait = lockWaitTimeout * 1000; // milliseconds
const checkInterval = 250; // milliseconds
const start = Date.now();
try {
while (Date.now() - start < maxWait) {
// eslint-disable-next-line no-await-in-loop
if (await isHistoryTableExisting(connection, table)) {
return;
}
// eslint-disable-next-line no-await-in-loop
await setTimeout(checkInterval);
}
throw new Error(`Timeout waiting for table ${table} to be created by other process`);
} finally {
await connection.end();
}
// This table definition is compatible with the one used by the immigration-mysql package
await pool.execute(`
CREATE TABLE ${escapeId(table)} (
name varchar(255) not null primary key,
status varchar(32),
date datetime not null
) Engine=InnoDB;
`);
};
export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlStorageOptions): EmigrateStorage => {
return {
async initializeStorage() {
await initializeDatabase(connection);
await initializeTable(connection, table);
const pool = getPool(connection);
@ -324,35 +269,24 @@ export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlSt
});
}
try {
await initializeTable(pool, table);
} catch (error) {
await pool.end();
throw error;
}
const storage: Storage = {
async lock(migrations) {
const connection = await pool.getConnection();
const lockedMigrations: MigrationMetadata[] = [];
try {
await connection.beginTransaction();
const lockedMigrations: MigrationMetadata[] = [];
for await (const migration of migrations) {
if (await lockMigration(connection, table, migration)) {
lockedMigrations.push(migration);
}
for await (const migration of migrations) {
if (await lockMigration(pool, table, migration)) {
lockedMigrations.push(migration);
}
if (lockedMigrations.length === migrations.length) {
await connection.commit();
return lockedMigrations;
}
await connection.rollback();
return [];
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
return lockedMigrations;
},
async unlock(migrations) {
for await (const migration of migrations) {
@ -411,6 +345,17 @@ export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlSt
};
};
export const { initializeStorage } = createMysqlStorage({
table: process.env['MYSQL_TABLE'],
connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
user: process.env['MYSQL_USER'],
password: process.env['MYSQL_PASSWORD'],
database: process.env['MYSQL_DATABASE'],
},
});
export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlugin => {
return {
loadableExtensions: ['.sql'],
@ -429,6 +374,16 @@ export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlu
};
};
export const { loadableExtensions, loadMigration } = createMysqlLoader({
connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
user: process.env['MYSQL_USER'],
password: process.env['MYSQL_PASSWORD'],
database: process.env['MYSQL_DATABASE'],
},
});
export const generateMigration: GenerateMigrationFunction = async (name) => {
return {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`,
@ -437,34 +392,6 @@ export const generateMigration: GenerateMigrationFunction = async (name) => {
};
};
const storage = createMysqlStorage({
table: process.env['MYSQL_TABLE'],
connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
user: process.env['MYSQL_USER'],
password: process.env['MYSQL_PASSWORD'],
database: process.env['MYSQL_DATABASE'],
},
});
const loader = createMysqlLoader({
connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
user: process.env['MYSQL_USER'],
password: process.env['MYSQL_PASSWORD'],
database: process.env['MYSQL_DATABASE'],
},
});
// eslint-disable-next-line prefer-destructuring
export const initializeStorage: () => Promise<Storage> = storage.initializeStorage;
// eslint-disable-next-line prefer-destructuring
export const loadableExtensions: string[] = loader.loadableExtensions;
// eslint-disable-next-line prefer-destructuring
export const loadMigration: (migration: MigrationMetadata) => Awaitable<MigrationFunction> = loader.loadMigration;
const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = {
initializeStorage,
loadableExtensions,


@ -1,49 +0,0 @@
/* eslint @typescript-eslint/naming-convention:0, import/no-extraneous-dependencies: 0 */
import process from 'node:process';
import { GenericContainer, type StartedTestContainer } from 'testcontainers';
let container: StartedTestContainer | undefined;
export const startDatabase = async (): Promise<{ port: number; host: string }> => {
if (process.env['CI']) {
const config = {
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : 3306,
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
host: process.env['MYSQL_HOST'] || 'localhost',
};
console.log(`Connecting to MySQL from environment variables: ${JSON.stringify(config)}`);
return config;
}
if (!container) {
console.log('Starting MySQL container...');
const containerSetup = new GenericContainer('mysql:8.2')
.withEnvironment({
MYSQL_ROOT_PASSWORD: 'admin',
MYSQL_USER: 'emigrate',
MYSQL_PASSWORD: 'emigrate',
MYSQL_DATABASE: 'emigrate',
})
.withTmpFs({ '/var/lib/mysql': 'rw' })
.withCommand(['--sql-mode=NO_ENGINE_SUBSTITUTION', '--default-authentication-plugin=mysql_native_password'])
.withExposedPorts(3306)
.withReuse();
container = await containerSetup.start();
console.log('MySQL container started');
}
return { port: container.getMappedPort(3306), host: container.getHost() };
};
export const stopDatabase = async (): Promise<void> => {
if (container) {
console.log('Stopping MySQL container...');
await container.stop();
console.log('MySQL container stopped');
container = undefined;
}
};


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,13 +1,5 @@
# @emigrate/plugin-generate-js
## 0.3.8
### Patch Changes
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.7
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/plugin-generate-js",
"version": "0.3.8",
"version": "0.3.7",
"publishConfig": {
"access": "public"
},


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,12 +1,5 @@
# @emigrate/plugin-tools
## 0.9.8
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- @emigrate/types@0.12.2
## 0.9.7
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/plugin-tools",
"version": "0.9.8",
"version": "0.9.7",
"publishConfig": {
"access": "public",
"provenance": true


@ -204,7 +204,7 @@ const load = async <T>(
*
* @returns A timestamp string in the format YYYYMMDDHHmmssmmm
*/
export const getTimestampPrefix = (): string => new Date().toISOString().replaceAll(/[-:ZT.]/g, '');
export const getTimestampPrefix = () => new Date().toISOString().replaceAll(/[-:ZT.]/g, '');
/**
* A utility function to sanitize a migration name so that it can be used as a filename
@ -212,7 +212,7 @@ export const getTimestampPrefix = (): string => new Date().toISOString().replace
* @param name A migration name to sanitize
* @returns A sanitized migration name that can be used as a filename
*/
export const sanitizeMigrationName = (name: string): string =>
export const sanitizeMigrationName = (name: string) =>
name
.replaceAll(/[\W/\\:|*?'"<>_]+/g, '_')
.trim()


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,14 +1,5 @@
# @emigrate/postgres
## 0.3.2
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.1
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/postgres",
"version": "0.3.2",
"version": "0.3.1",
"publishConfig": {
"access": "public",
"provenance": true


@ -11,8 +11,6 @@ import {
type GeneratorPlugin,
type SerializedError,
type MigrationHistoryEntry,
type Awaitable,
type MigrationFunction,
} from '@emigrate/types';
const defaultTable = 'migrations';
@ -257,6 +255,17 @@ export const createPostgresStorage = ({
};
};
export const { initializeStorage } = createPostgresStorage({
table: process.env['POSTGRES_TABLE'],
connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
user: process.env['POSTGRES_USER'],
password: process.env['POSTGRES_PASSWORD'],
database: process.env['POSTGRES_DB'],
},
});
export const createPostgresLoader = ({ connection }: PostgresLoaderOptions): LoaderPlugin => {
return {
loadableExtensions: ['.sql'],
@ -275,6 +284,16 @@ export const createPostgresLoader = ({ connection }: PostgresLoaderOptions): Loa
};
};
export const { loadableExtensions, loadMigration } = createPostgresLoader({
connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
user: process.env['POSTGRES_USER'],
password: process.env['POSTGRES_PASSWORD'],
database: process.env['POSTGRES_DB'],
},
});
export const generateMigration: GenerateMigrationFunction = async (name) => {
return {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`,
@ -283,34 +302,6 @@ export const generateMigration: GenerateMigrationFunction = async (name) => {
};
};
const storage = createPostgresStorage({
table: process.env['POSTGRES_TABLE'],
connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
user: process.env['POSTGRES_USER'],
password: process.env['POSTGRES_PASSWORD'],
database: process.env['POSTGRES_DB'],
},
});
const loader = createPostgresLoader({
connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
user: process.env['POSTGRES_USER'],
password: process.env['POSTGRES_PASSWORD'],
database: process.env['POSTGRES_DB'],
},
});
// eslint-disable-next-line prefer-destructuring
export const initializeStorage: () => Promise<Storage> = storage.initializeStorage;
// eslint-disable-next-line prefer-destructuring
export const loadableExtensions: string[] = loader.loadableExtensions;
// eslint-disable-next-line prefer-destructuring
export const loadMigration: (migration: MigrationMetadata) => Awaitable<MigrationFunction> = loader.loadMigration;
const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = {
initializeStorage,
loadableExtensions,


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,12 +1,5 @@
# @emigrate/reporter-pino
## 0.6.5
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- @emigrate/types@0.12.2
## 0.6.4
### Patch Changes


@ -1,6 +1,6 @@
{
"name": "@emigrate/reporter-pino",
"version": "0.6.5",
"version": "0.6.4",
"publishConfig": {
"access": "public",
"provenance": true


@ -204,8 +204,6 @@ export const createPinoReporter = (options: PinoReporterOptions = {}): EmigrateR
return new PinoReporter(options);
};
const defaultExport: EmigrateReporter = createPinoReporter({
export default createPinoReporter({
level: process.env['LOG_LEVEL'],
});
export default defaultExport;


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}


@ -1,11 +1,5 @@
# @emigrate/tsconfig
## 1.0.3
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
## 1.0.2
### Patch Changes


@ -11,7 +11,6 @@
"forceConsistentCasingInFileNames": true,
"inlineSources": false,
"isolatedModules": true,
"isolatedDeclarations": true,
"incremental": true,
"module": "NodeNext",
"moduleResolution": "NodeNext",
@ -32,7 +31,5 @@
"strict": true,
"target": "ES2022",
"lib": ["ESNext", "DOM", "DOM.Iterable"]
},
"include": ["${configDir}/src"],
"exclude": ["${configDir}/dist"]
}
}


@ -3,7 +3,6 @@
"display": "Build",
"extends": "./base.json",
"compilerOptions": {
"noEmit": false,
"outDir": "${configDir}/dist"
"noEmit": false
}
}


@ -1,6 +1,6 @@
{
"name": "@emigrate/tsconfig",
"version": "1.0.3",
"version": "1.0.2",
"publishConfig": {
"access": "public",
"provenance": true


@ -1,3 +1,8 @@
{
"extends": "@emigrate/tsconfig/build.json"
"extends": "@emigrate/tsconfig/build.json",
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}

pnpm-lock.yaml (generated)

File diff suppressed because it is too large.


@ -1,7 +1,6 @@
{
"$schema": "https://turborepo.org/schema.json",
"ui": "stream",
"tasks": {
"pipeline": {
"build": {
"dependsOn": ["^build"],
"inputs": ["src/**/*", "!src/**/*.test.ts", "tsconfig.json", "tsconfig.build.json"],