Compare commits

..

16 commits

Author SHA1 Message Date
52844d7a09 ci(mysql): add @emigrate/mysql integration tests to GitHub Actions
Some checks failed
Deploy to GitHub Pages / build (push) Failing after 2m38s
Deploy to GitHub Pages / deploy (push) Has been skipped
Integration Tests / Emigrate MySQL integration tests (push) Failing after 4m0s
Release / Release (push) Failing after 12s
CI / Build and Test (push) Has been cancelled
2025-04-25 09:48:34 +02:00
github-actions[bot]
fa3fb20dc5 chore(release): version packages 2025-04-24 16:06:29 +02:00
26240f49ff fix(mysql): make sure migrations are run in order when run concurrently
Now we either lock all or none of the migrations to run,
to make sure they are not out of order when multiple instances of Emigrate run concurrently.
2025-04-24 15:57:44 +02:00
6eb60177c5 fix: use another changesets-action version 2024-08-09 16:03:34 +02:00
b3b603b2fc feat: make aggregated GitHub releases instead of one per package
And also publish packages with unreleased changes tagged with `next` to NPM
2024-08-09 15:49:22 +02:00
bb9d674cd7 chore: turn off Turbo's UI as it messes with the terminal and is not as intuitive as it seems 2024-06-27 16:05:45 +02:00
c151031d41 chore(deps): upgrade Turbo and opt out from telemetry 2024-06-27 16:05:45 +02:00
dependabot[bot]
48181d88b7 chore(deps): bump turbo from 1.10.16 to 2.0.5
Bumps [turbo](https://github.com/vercel/turbo) from 1.10.16 to 2.0.5.
- [Release notes](https://github.com/vercel/turbo/releases)
- [Changelog](https://github.com/vercel/turbo/blob/main/release.md)
- [Commits](https://github.com/vercel/turbo/compare/v1.10.16...v2.0.5)

---
updated-dependencies:
- dependency-name: turbo
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-06-27 16:05:45 +02:00
d779286084 chore(deps): upgrade TypeScript to v5.5 and enable isolatedDeclarations 2024-06-27 15:38:50 +02:00
ef848a0553 chore(deps): re-add the specific PNPM version for the deploy workflow 2024-06-27 13:27:34 +02:00
4d12402595 chore(deps): make sure the correct PNPM version is used (everywhere) 2024-06-27 11:55:33 +02:00
be5c4d28b6 chore(deps): make sure the correct PNPM version is used 2024-06-27 11:47:40 +02:00
2cefa2508b chore(deps): upgrade PNPM to v9.4.0 2024-06-27 11:12:21 +02:00
0ff9f60d59 chore(deps): upgrade all action dependencies
Closes #70, #128, #135, #145
2024-06-27 10:59:47 +02:00
github-actions[bot]
31693ddb3c chore(release): version packages 2024-06-25 09:21:37 +02:00
57498db248 fix(mysql): close database connections gracefully when using Bun 2024-06-25 08:22:56 +02:00
54 changed files with 7629 additions and 5079 deletions

View file

@ -13,6 +13,7 @@ jobs:
env: env:
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }} TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
DO_NOT_TRACK: 1
steps: steps:
- name: Check out code - name: Check out code
@ -20,14 +21,12 @@ jobs:
with: with:
fetch-depth: 2 fetch-depth: 2
- uses: pnpm/action-setup@v3.0.0 - uses: pnpm/action-setup@v4.0.0
with:
version: 8.3.1
- name: Setup Node.js environment - name: Setup Node.js environment
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 20.9.0 node-version: 22.15.0
cache: 'pnpm' cache: 'pnpm'
- name: Install dependencies - name: Install dependencies

View file

@ -10,6 +10,7 @@ on:
# Allow this job to clone the repo and create a page deployment # Allow this job to clone the repo and create a page deployment
permissions: permissions:
actions: read
contents: read contents: read
pages: write pages: write
id-token: write id-token: write
@ -29,11 +30,10 @@ jobs:
echo $ASTRO_SITE echo $ASTRO_SITE
echo $ASTRO_BASE echo $ASTRO_BASE
- name: Install, build, and upload your site output - name: Install, build, and upload your site output
uses: withastro/action@v1 uses: withastro/action@v2
with: with:
path: ./docs # The root location of your Astro project inside the repository. (optional) path: ./docs # The root location of your Astro project inside the repository. (optional)
node-version: 20 # The specific version of Node that should be used to build your site. Defaults to 18. (optional) package-manager: pnpm@9.4.0 # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. (optional)
package-manager: pnpm@8.10.2 # The Node package manager that should be used to install dependencies and build your site. Automatically detected based on your lockfile. (optional)
deploy: deploy:
needs: build needs: build
@ -44,4 +44,4 @@ jobs:
steps: steps:
- name: Deploy to GitHub Pages - name: Deploy to GitHub Pages
id: deployment id: deployment
uses: actions/deploy-pages@v1 uses: actions/deploy-pages@v4

62
.github/workflows/integration.yaml vendored Normal file
View file

@ -0,0 +1,62 @@
name: Integration Tests
on:
push:
branches: ['main', 'changeset-release/main']
pull_request:
jobs:
mysql_integration:
name: Emigrate MySQL integration tests
timeout-minutes: 15
runs-on: ubuntu-latest
env:
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
DO_NOT_TRACK: 1
services:
mysql:
image: mysql:8.0
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: emigrate
MYSQL_USER: emigrate
MYSQL_PASSWORD: emigrate
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping -h localhost" --health-interval=10s --health-timeout=5s --health-retries=5
steps:
- name: Check out code
uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: pnpm/action-setup@v4.0.0
- name: Setup Node.js environment
uses: actions/setup-node@v4
with:
node-version: 22.15.0
cache: 'pnpm'
- name: Install dependencies
run: pnpm install
- name: Wait for MySQL to be ready
run: |
for i in {1..30}; do
nc -z localhost 3306 && echo "MySQL is up!" && break
echo "Waiting for MySQL..."
sleep 2
done
- name: Build package
run: pnpm build --filter @emigrate/mysql
- name: Integration Tests
env:
MYSQL_HOST: '127.0.0.1'
MYSQL_PORT: 3306
run: pnpm --filter @emigrate/mysql integration

View file

@ -25,25 +25,48 @@ jobs:
persist-credentials: false persist-credentials: false
fetch-depth: 0 fetch-depth: 0
- uses: pnpm/action-setup@v3.0.0 - uses: pnpm/action-setup@v4.0.0
with:
version: 8.3.1
- name: Setup Node.js environment - name: Setup Node.js environment
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 20.9.0 node-version: 22.15.0
cache: 'pnpm' cache: 'pnpm'
- name: Install Dependencies - name: Install Dependencies
run: pnpm install run: pnpm install
- name: Create Release Pull Request - name: Create Release Pull Request
uses: changesets/action@v1.4.6 id: changesets
uses: aboviq/changesets-action@v1.5.2
with: with:
publish: pnpm run release publish: pnpm run release
commit: 'chore(release): version packages' commit: 'chore(release): version packages'
title: 'chore(release): version packages' title: 'chore(release): version packages'
createGithubReleases: aggregate
env: env:
GITHUB_TOKEN: ${{ secrets.PAT_GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.PAT_GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }} NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Release to @next tag on npm
if: github.ref_name == 'main' && steps.changesets.outputs.published != 'true'
run: |
git checkout main
CHANGESET_FILE=$(git diff-tree --no-commit-id --name-only HEAD -r ".changeset/*-*-*.md")
if [ -z "$CHANGESET_FILE" ]; then
echo "No changesets found, skipping release to @next tag"
exit 0
fi
AFFECTED_PACKAGES=$(sed -n '/---/,/---/p' "$CHANGESET_FILE" | sed '/---/d')
if [ -z "$AFFECTED_PACKAGES" ]; then
echo "No packages affected by changesets, skipping release to @next tag"
exit 0
fi
pnpm changeset version --snapshot next
pnpm changeset publish --tag next
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
GITHUB_TOKEN: ${{ secrets.PAT_GITHUB_TOKEN }}

View file

@ -11,6 +11,7 @@
"astro": "astro" "astro": "astro"
}, },
"dependencies": { "dependencies": {
"@astrojs/check": "^0.7.0",
"@astrojs/starlight": "^0.15.0", "@astrojs/starlight": "^0.15.0",
"@astrojs/starlight-tailwind": "2.0.1", "@astrojs/starlight-tailwind": "2.0.1",
"@astrojs/tailwind": "^5.0.3", "@astrojs/tailwind": "^5.0.3",
@ -20,5 +21,6 @@
}, },
"volta": { "volta": {
"extends": "../package.json" "extends": "../package.json"
} },
"packageManager": "pnpm@9.4.0"
} }

View file

@ -37,9 +37,10 @@
"bugs": "https://github.com/aboviq/emigrate/issues", "bugs": "https://github.com/aboviq/emigrate/issues",
"license": "MIT", "license": "MIT",
"volta": { "volta": {
"node": "20.9.0", "node": "22.15.0",
"pnpm": "8.10.2" "pnpm": "9.4.0"
}, },
"packageManager": "pnpm@9.4.0",
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
@ -61,7 +62,10 @@
}, },
"overrides": [ "overrides": [
{ {
"files": "packages/**/*.test.ts", "files": [
"packages/**/*.test.ts",
"packages/**/*.integration.ts"
],
"rules": { "rules": {
"@typescript-eslint/no-floating-promises": 0, "@typescript-eslint/no-floating-promises": 0,
"max-params": 0 "max-params": 0
@ -79,9 +83,10 @@
"lint-staged": "15.2.0", "lint-staged": "15.2.0",
"npm-run-all": "4.1.5", "npm-run-all": "4.1.5",
"prettier": "3.1.1", "prettier": "3.1.1",
"tsx": "4.7.0", "testcontainers": "10.24.2",
"turbo": "1.10.16", "tsx": "4.15.7",
"typescript": "5.3.3", "turbo": "2.0.5",
"typescript": "5.5.2",
"xo": "0.56.0" "xo": "0.56.0"
} }
} }

View file

@ -1,5 +1,14 @@
# @emigrate/cli # @emigrate/cli
## 0.18.4
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.18.3 ## 0.18.3
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/cli", "name": "@emigrate/cli",
"version": "0.18.3", "version": "0.18.4",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true

View file

@ -1,12 +1,12 @@
import { type MigrationHistoryEntry, type MigrationMetadata, type MigrationMetadataFinished } from '@emigrate/types'; import { type MigrationHistoryEntry, type MigrationMetadata, type MigrationMetadataFinished } from '@emigrate/types';
import { toMigrationMetadata } from './to-migration-metadata.js'; import { toMigrationMetadata } from './to-migration-metadata.js';
import { getMigrations as getMigrationsOriginal } from './get-migrations.js'; import { getMigrations as getMigrationsOriginal, type GetMigrationsFunction } from './get-migrations.js';
export async function* collectMigrations( export async function* collectMigrations(
cwd: string, cwd: string,
directory: string, directory: string,
history: AsyncIterable<MigrationHistoryEntry>, history: AsyncIterable<MigrationHistoryEntry>,
getMigrations = getMigrationsOriginal, getMigrations: GetMigrationsFunction = getMigrationsOriginal,
): AsyncIterable<MigrationMetadata | MigrationMetadataFinished> { ): AsyncIterable<MigrationMetadata | MigrationMetadataFinished> {
const allMigrations = await getMigrations(cwd, directory); const allMigrations = await getMigrations(cwd, directory);
const seen = new Set<string>(); const seen = new Set<string>();

View file

@ -17,7 +17,7 @@ export default async function listCommand({
storage: storageConfig, storage: storageConfig,
color, color,
cwd, cwd,
}: Config & ExtraFlags) { }: Config & ExtraFlags): Promise<number> {
if (!directory) { if (!directory) {
throw MissingOptionError.fromOption('directory'); throw MissingOptionError.fromOption('directory');
} }

View file

@ -24,7 +24,7 @@ type ExtraFlags = {
export default async function newCommand( export default async function newCommand(
{ directory, template, reporter: reporterConfig, plugins = [], cwd, extension, color }: Config & ExtraFlags, { directory, template, reporter: reporterConfig, plugins = [], cwd, extension, color }: Config & ExtraFlags,
name: string, name: string,
) { ): Promise<void> {
if (!directory) { if (!directory) {
throw MissingOptionError.fromOption('directory'); throw MissingOptionError.fromOption('directory');
} }

View file

@ -11,6 +11,7 @@ import {
StorageInitError, StorageInitError,
} from '../errors.js'; } from '../errors.js';
import { import {
assertErrorEqualEnough,
getErrorCause, getErrorCause,
getMockedReporter, getMockedReporter,
getMockedStorage, getMockedStorage,
@ -199,6 +200,11 @@ function assertPreconditionsFailed(reporter: Mocked<Required<EmigrateReporter>>,
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped'); assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once'); assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? []; const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
// hackety hack:
if (finishedError) {
finishedError.stack = error?.stack;
}
assert.deepStrictEqual(error, finishedError, 'Finished error'); assert.deepStrictEqual(error, finishedError, 'Finished error');
const cause = getErrorCause(error); const cause = getErrorCause(error);
const expectedCause = finishedError?.cause; const expectedCause = finishedError?.cause;
@ -288,14 +294,7 @@ function assertPreconditionsFulfilled(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped'); assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once'); assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? []; const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assert.deepStrictEqual(error, finishedError, 'Finished error'); assertErrorEqualEnough(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, expected.length, 'Finished entries length'); assert.strictEqual(entries?.length, expected.length, 'Finished entries length');
assert.deepStrictEqual( assert.deepStrictEqual(
entries.map((entry) => `${entry.name} (${entry.status})`), entries.map((entry) => `${entry.name} (${entry.status})`),

View file

@ -39,7 +39,7 @@ export default async function removeCommand(
getMigrations, getMigrations,
}: Config & ExtraFlags, }: Config & ExtraFlags,
name: string, name: string,
) { ): Promise<number> {
if (!directory) { if (!directory) {
throw MissingOptionError.fromOption('directory'); throw MissingOptionError.fromOption('directory');
} }

View file

@ -1,13 +1,6 @@
import { describe, it, mock } from 'node:test'; import { describe, it, mock } from 'node:test';
import assert from 'node:assert'; import assert from 'node:assert';
import { import { type EmigrateReporter, type Storage, type Plugin, type MigrationMetadataFinished } from '@emigrate/types';
type EmigrateReporter,
type Storage,
type Plugin,
type SerializedError,
type MigrationMetadataFinished,
} from '@emigrate/types';
import { deserializeError } from 'serialize-error';
import { version } from '../get-package-info.js'; import { version } from '../get-package-info.js';
import { import {
BadOptionError, BadOptionError,
@ -16,7 +9,6 @@ import {
MigrationHistoryError, MigrationHistoryError,
MigrationRunError, MigrationRunError,
StorageInitError, StorageInitError,
toSerializedError,
} from '../errors.js'; } from '../errors.js';
import { import {
type Mocked, type Mocked,
@ -24,7 +16,7 @@ import {
toMigrations, toMigrations,
getMockedReporter, getMockedReporter,
getMockedStorage, getMockedStorage,
getErrorCause, assertErrorEqualEnough,
} from '../test-utils.js'; } from '../test-utils.js';
import upCommand from './up.js'; import upCommand from './up.js';
@ -930,15 +922,13 @@ function assertPreconditionsFulfilled(
for (const [index, entry] of failedEntries.entries()) { for (const [index, entry] of failedEntries.entries()) {
if (entry.status === 'failed') { if (entry.status === 'failed') {
const error = reporter.onMigrationError.mock.calls[index]?.arguments[1]; const error = reporter.onMigrationError.mock.calls[index]?.arguments[1];
assert.deepStrictEqual(error, entry.error, 'Error'); assertErrorEqualEnough(error, entry.error, 'Error');
const cause = entry.error?.cause;
assert.deepStrictEqual(error?.cause, cause ? deserializeError(cause) : cause, 'Error cause');
if (entry.started) { if (entry.started) {
const [finishedMigration, error] = storage.onError.mock.calls[index]?.arguments ?? []; const [finishedMigration, error] = storage.onError.mock.calls[index]?.arguments ?? [];
assert.strictEqual(finishedMigration?.name, entry.name); assert.strictEqual(finishedMigration?.name, entry.name);
assert.strictEqual(finishedMigration?.status, entry.status); assert.strictEqual(finishedMigration?.status, entry.status);
assertErrorEqualEnough(error, entry.error); assertErrorEqualEnough(error, entry.error, `Entry error (${entry.name})`);
} }
} }
} }
@ -946,15 +936,7 @@ function assertPreconditionsFulfilled(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, pending + skipped, 'Total pending and skipped'); assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, pending + skipped, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once'); assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? []; const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assertErrorEqualEnough(error, finishedError); assertErrorEqualEnough(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, expected.length, 'Finished entries length'); assert.strictEqual(entries?.length, expected.length, 'Finished entries length');
assert.deepStrictEqual( assert.deepStrictEqual(
entries.map((entry) => `${entry.name} (${entry.status})`), entries.map((entry) => `${entry.name} (${entry.status})`),
@ -995,33 +977,6 @@ function assertPreconditionsFailed(
assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped'); assert.strictEqual(reporter.onMigrationSkip.mock.calls.length, 0, 'Total pending and skipped');
assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once'); assert.strictEqual(reporter.onFinished.mock.calls.length, 1, 'Finished called once');
const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? []; const [entries, error] = reporter.onFinished.mock.calls[0]?.arguments ?? [];
assert.deepStrictEqual(error, finishedError, 'Finished error'); assertErrorEqualEnough(error, finishedError, 'Finished error');
const cause = getErrorCause(error);
const expectedCause = finishedError?.cause;
assert.deepStrictEqual(
cause,
expectedCause ? deserializeError(expectedCause) : expectedCause,
'Finished error cause',
);
assert.strictEqual(entries?.length, 0, 'Finished entries length'); assert.strictEqual(entries?.length, 0, 'Finished entries length');
} }
function assertErrorEqualEnough(actual?: Error | SerializedError, expected?: Error) {
if (expected === undefined) {
assert.strictEqual(actual, undefined);
return;
}
const {
cause: actualCause,
stack: actualStack,
...actualError
} = actual instanceof Error ? toSerializedError(actual) : actual ?? {};
const { cause: expectedCause, stack: expectedStack, ...expectedError } = toSerializedError(expected);
// @ts-expect-error Ignore
const { stack: actualCauseStack, ...actualCauseRest } = actualCause ?? {};
// @ts-expect-error Ignore
const { stack: expectedCauseStack, ...expectedCauseRest } = expectedCause ?? {};
assert.deepStrictEqual(actualError, expectedError);
assert.deepStrictEqual(actualCauseRest, expectedCauseRest);
}

View file

@ -8,7 +8,7 @@ import { serializeError, errorConstructors, deserializeError } from 'serialize-e
const formatter = new Intl.ListFormat('en', { style: 'long', type: 'disjunction' }); const formatter = new Intl.ListFormat('en', { style: 'long', type: 'disjunction' });
export const toError = (error: unknown) => (error instanceof Error ? error : new Error(String(error))); export const toError = (error: unknown): Error => (error instanceof Error ? error : new Error(String(error)));
export const toSerializedError = (error: unknown) => { export const toSerializedError = (error: unknown) => {
const errorInstance = toError(error); const errorInstance = toError(error);
@ -30,7 +30,7 @@ export class EmigrateError extends Error {
export class ShowUsageError extends EmigrateError {} export class ShowUsageError extends EmigrateError {}
export class MissingOptionError extends ShowUsageError { export class MissingOptionError extends ShowUsageError {
static fromOption(option: string | string[]) { static fromOption(option: string | string[]): MissingOptionError {
return new MissingOptionError( return new MissingOptionError(
`Missing required option: ${Array.isArray(option) ? formatter.format(option) : option}`, `Missing required option: ${Array.isArray(option) ? formatter.format(option) : option}`,
undefined, undefined,
@ -48,7 +48,7 @@ export class MissingOptionError extends ShowUsageError {
} }
export class MissingArgumentsError extends ShowUsageError { export class MissingArgumentsError extends ShowUsageError {
static fromArgument(argument: string) { static fromArgument(argument: string): MissingArgumentsError {
return new MissingArgumentsError(`Missing required argument: ${argument}`, undefined, argument); return new MissingArgumentsError(`Missing required argument: ${argument}`, undefined, argument);
} }
@ -62,7 +62,7 @@ export class MissingArgumentsError extends ShowUsageError {
} }
export class OptionNeededError extends ShowUsageError { export class OptionNeededError extends ShowUsageError {
static fromOption(option: string, message: string) { static fromOption(option: string, message: string): OptionNeededError {
return new OptionNeededError(message, undefined, option); return new OptionNeededError(message, undefined, option);
} }
@ -76,7 +76,7 @@ export class OptionNeededError extends ShowUsageError {
} }
export class BadOptionError extends ShowUsageError { export class BadOptionError extends ShowUsageError {
static fromOption(option: string, message: string) { static fromOption(option: string, message: string): BadOptionError {
return new BadOptionError(message, undefined, option); return new BadOptionError(message, undefined, option);
} }
@ -96,7 +96,7 @@ export class UnexpectedError extends EmigrateError {
} }
export class MigrationHistoryError extends EmigrateError { export class MigrationHistoryError extends EmigrateError {
static fromHistoryEntry(entry: FailedMigrationHistoryEntry) { static fromHistoryEntry(entry: FailedMigrationHistoryEntry): MigrationHistoryError {
return new MigrationHistoryError(`Migration ${entry.name} is in a failed state, it should be fixed and removed`, { return new MigrationHistoryError(`Migration ${entry.name} is in a failed state, it should be fixed and removed`, {
cause: deserializeError(entry.error), cause: deserializeError(entry.error),
}); });
@ -108,7 +108,7 @@ export class MigrationHistoryError extends EmigrateError {
} }
export class MigrationLoadError extends EmigrateError { export class MigrationLoadError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) { static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationLoadError {
return new MigrationLoadError(`Failed to load migration file: ${metadata.relativeFilePath}`, { cause }); return new MigrationLoadError(`Failed to load migration file: ${metadata.relativeFilePath}`, { cause });
} }
@ -118,7 +118,7 @@ export class MigrationLoadError extends EmigrateError {
} }
export class MigrationRunError extends EmigrateError { export class MigrationRunError extends EmigrateError {
static fromMetadata(metadata: FailedMigrationMetadata) { static fromMetadata(metadata: FailedMigrationMetadata): MigrationRunError {
return new MigrationRunError(`Failed to run migration: ${metadata.relativeFilePath}`, { cause: metadata.error }); return new MigrationRunError(`Failed to run migration: ${metadata.relativeFilePath}`, { cause: metadata.error });
} }
@ -128,7 +128,7 @@ export class MigrationRunError extends EmigrateError {
} }
export class MigrationNotRunError extends EmigrateError { export class MigrationNotRunError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) { static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationNotRunError {
return new MigrationNotRunError(`Migration "${metadata.name}" is not in the migration history`, { cause }); return new MigrationNotRunError(`Migration "${metadata.name}" is not in the migration history`, { cause });
} }
@ -138,7 +138,7 @@ export class MigrationNotRunError extends EmigrateError {
} }
export class MigrationRemovalError extends EmigrateError { export class MigrationRemovalError extends EmigrateError {
static fromMetadata(metadata: MigrationMetadata, cause?: Error) { static fromMetadata(metadata: MigrationMetadata, cause?: Error): MigrationRemovalError {
return new MigrationRemovalError(`Failed to remove migration: ${metadata.relativeFilePath}`, { cause }); return new MigrationRemovalError(`Failed to remove migration: ${metadata.relativeFilePath}`, { cause });
} }
@ -148,7 +148,7 @@ export class MigrationRemovalError extends EmigrateError {
} }
export class StorageInitError extends EmigrateError { export class StorageInitError extends EmigrateError {
static fromError(error: Error) { static fromError(error: Error): StorageInitError {
return new StorageInitError('Could not initialize storage', { cause: error }); return new StorageInitError('Could not initialize storage', { cause: error });
} }
@ -158,11 +158,11 @@ export class StorageInitError extends EmigrateError {
} }
export class CommandAbortError extends EmigrateError { export class CommandAbortError extends EmigrateError {
static fromSignal(signal: NodeJS.Signals) { static fromSignal(signal: NodeJS.Signals): CommandAbortError {
return new CommandAbortError(`Command aborted due to signal: ${signal}`); return new CommandAbortError(`Command aborted due to signal: ${signal}`);
} }
static fromReason(reason: string, cause?: unknown) { static fromReason(reason: string, cause?: unknown): CommandAbortError {
return new CommandAbortError(`Command aborted: ${reason}`, { cause }); return new CommandAbortError(`Command aborted: ${reason}`, { cause });
} }
@ -172,7 +172,7 @@ export class CommandAbortError extends EmigrateError {
} }
export class ExecutionDesertedError extends EmigrateError { export class ExecutionDesertedError extends EmigrateError {
static fromReason(reason: string, cause?: Error) { static fromReason(reason: string, cause?: Error): ExecutionDesertedError {
return new ExecutionDesertedError(`Execution deserted: ${reason}`, { cause }); return new ExecutionDesertedError(`Execution deserted: ${reason}`, { cause });
} }

View file

@ -1,6 +1,6 @@
import process from 'node:process'; import process from 'node:process';
export const getDuration = (start: [number, number]) => { export const getDuration = (start: [number, number]): number => {
const [seconds, nanoseconds] = process.hrtime(start); const [seconds, nanoseconds] = process.hrtime(start);
return seconds * 1000 + nanoseconds / 1_000_000; return seconds * 1000 + nanoseconds / 1_000_000;
}; };

View file

@ -39,6 +39,6 @@ export const getMigrations = async (cwd: string, directory: string): Promise<Mig
extension: withLeadingPeriod(path.extname(name)), extension: withLeadingPeriod(path.extname(name)),
directory, directory,
cwd, cwd,
} satisfies MigrationMetadata; };
}); });
}; };

View file

@ -28,4 +28,7 @@ const getPackageInfo = async () => {
throw new UnexpectedError(`Could not read package info from: ${packageInfoPath}`); throw new UnexpectedError(`Could not read package info from: ${packageInfoPath}`);
}; };
export const { version } = await getPackageInfo(); const packageInfo = await getPackageInfo();
// eslint-disable-next-line prefer-destructuring
export const version: string = packageInfo.version;

View file

@ -1,5 +1,5 @@
export * from './types.js'; export * from './types.js';
export const emigrate = () => { export const emigrate = (): void => {
// console.log('Done!'); // console.log('Done!');
}; };

View file

@ -471,6 +471,6 @@ class DefaultReporter implements Required<EmigrateReporter> {
} }
} }
const reporterDefault = interactive ? new DefaultFancyReporter() : new DefaultReporter(); const reporterDefault: EmigrateReporter = interactive ? new DefaultFancyReporter() : new DefaultReporter();
export default reporterDefault; export default reporterDefault;

View file

@ -1,7 +1,8 @@
import type { EmigrateReporter } from '@emigrate/types';
import { type Config } from '../types.js'; import { type Config } from '../types.js';
import * as reporters from './index.js'; import * as reporters from './index.js';
export const getStandardReporter = (reporter?: Config['reporter']) => { export const getStandardReporter = (reporter?: Config['reporter']): EmigrateReporter | undefined => {
if (!reporter) { if (!reporter) {
return reporters.pretty; return reporters.pretty;
} }
@ -10,5 +11,5 @@ export const getStandardReporter = (reporter?: Config['reporter']) => {
return reporters[reporter as keyof typeof reporters]; return reporters[reporter as keyof typeof reporters];
} }
return; // eslint-disable-line no-useless-return return undefined;
}; };

View file

@ -55,6 +55,6 @@ class JsonReporter implements EmigrateReporter {
} }
} }
const jsonReporter = new JsonReporter() as EmigrateReporter; const jsonReporter: EmigrateReporter = new JsonReporter();
export default jsonReporter; export default jsonReporter;

View file

@ -1,5 +1,6 @@
import { mock, type Mock } from 'node:test'; import { mock, type Mock } from 'node:test';
import path from 'node:path'; import path from 'node:path';
import assert from 'node:assert';
import { import {
type SerializedError, type SerializedError,
type EmigrateReporter, type EmigrateReporter,
@ -9,13 +10,14 @@ import {
type NonFailedMigrationHistoryEntry, type NonFailedMigrationHistoryEntry,
type Storage, type Storage,
} from '@emigrate/types'; } from '@emigrate/types';
import { toSerializedError } from './errors.js';
export type Mocked<T> = { export type Mocked<T> = {
// @ts-expect-error - This is a mock // @ts-expect-error - This is a mock
[K in keyof T]: Mock<T[K]>; [K in keyof T]: Mock<T[K]>;
}; };
export async function noop() { export async function noop(): Promise<void> {
// noop // noop
} }
@ -31,8 +33,8 @@ export function getErrorCause(error: Error | undefined): Error | SerializedError
return undefined; return undefined;
} }
export function getMockedStorage(historyEntries: Array<string | MigrationHistoryEntry>) { export function getMockedStorage(historyEntries: Array<string | MigrationHistoryEntry>): Mocked<Storage> {
const storage: Mocked<Storage> = { return {
lock: mock.fn(async (migrations) => migrations), lock: mock.fn(async (migrations) => migrations),
unlock: mock.fn(async () => { unlock: mock.fn(async () => {
// void // void
@ -45,8 +47,6 @@ export function getMockedStorage(historyEntries: Array<string | MigrationHistory
onError: mock.fn(), onError: mock.fn(),
end: mock.fn(), end: mock.fn(),
}; };
return storage;
} }
export function getMockedReporter(): Mocked<Required<EmigrateReporter>> { export function getMockedReporter(): Mocked<Required<EmigrateReporter>> {
@ -112,3 +112,23 @@ export function toEntries(
): MigrationHistoryEntry[] { ): MigrationHistoryEntry[] {
return names.map((name) => (typeof name === 'string' ? toEntry(name, status) : name)); return names.map((name) => (typeof name === 'string' ? toEntry(name, status) : name));
} }
/**
 * Asserts that `actual` matches `expected` "closely enough" for tests:
 * stack traces are ignored (they differ between runs and environments) and
 * the error cause is compared one level deep, also without its stack.
 *
 * When `expected` is undefined, asserts that `actual` is undefined as well.
 *
 * @param actual - The error produced by the code under test (may already be serialized).
 * @param expected - The error the test expects, or undefined for "no error".
 * @param message - Optional assertion message; "(cause)" is appended for the cause comparison.
 */
export function assertErrorEqualEnough(actual?: Error | SerializedError, expected?: Error, message?: string): void {
  if (expected === undefined) {
    assert.strictEqual(actual, undefined);
    return;
  }
  // Serialize real Error instances so both sides are plain objects, then
  // split off the fields that are deliberately not compared (stack, cause).
  const {
    cause: actualCause,
    stack: actualStack,
    ...actualError
  } = actual instanceof Error ? toSerializedError(actual) : actual ?? {};
  const { cause: expectedCause, stack: expectedStack, ...expectedError } = toSerializedError(expected);
  // Drop the stacks from the causes as well before comparing them.
  // @ts-expect-error - the cause's static type doesn't allow destructuring here
  const { stack: actualCauseStack, ...actualCauseRest } = actualCause ?? {};
  // @ts-expect-error - the cause's static type doesn't allow destructuring here
  const { stack: expectedCauseStack, ...expectedCauseRest } = expectedCause ?? {};
  assert.deepStrictEqual(actualError, expectedError, message);
  assert.deepStrictEqual(actualCauseRest, expectedCauseRest, message ? `${message} (cause)` : undefined);
}

View file

@ -1 +1 @@
export const withLeadingPeriod = (string: string) => (string.startsWith('.') ? string : `.${string}`); export const withLeadingPeriod = (string: string): string => (string.startsWith('.') ? string : `.${string}`);

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,22 @@
# @emigrate/mysql # @emigrate/mysql
## 0.3.3
### Patch Changes
- 26240f4: Make sure we can initialize multiple running instances of Emigrate using @emigrate/mysql concurrently without issues with creating the history table (for instance in a Kubernetes environment and/or with a Percona cluster).
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- 26240f4: Either lock all or none of the migrations to run to make sure they run in order when multiple instances of Emigrate run concurrently (for instance in a Kubernetes environment)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.2
### Patch Changes
- 57498db: Unreference all connections when run using Bun, to not keep the process open unnecessarily long
## 0.3.1 ## 0.3.1
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/mysql", "name": "@emigrate/mysql",
"version": "0.3.1", "version": "0.3.3",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true
@ -17,12 +17,16 @@
}, },
"files": [ "files": [
"dist", "dist",
"!dist/*.tsbuildinfo" "!dist/*.tsbuildinfo",
"!dist/**/*.test.js",
"!dist/tests/*"
], ],
"scripts": { "scripts": {
"build": "tsc --pretty", "build": "tsc --pretty",
"build:watch": "tsc --pretty --watch", "build:watch": "tsc --pretty --watch",
"lint": "xo --cwd=../.. $(pwd)" "lint": "xo --cwd=../.. $(pwd)",
"integration": "glob -c \"node --import tsx --test-reporter spec --test\" \"./src/**/*.integration.ts\"",
"integration:watch": "glob -c \"node --watch --import tsx --test-reporter spec --test\" \"./src/**/*.integration.ts\""
}, },
"keywords": [ "keywords": [
"emigrate", "emigrate",
@ -45,8 +49,8 @@
}, },
"devDependencies": { "devDependencies": {
"@emigrate/tsconfig": "workspace:*", "@emigrate/tsconfig": "workspace:*",
"@types/bun": "1.0.5", "@types/bun": "1.1.2",
"bun-types": "1.0.26" "bun-types": "1.1.8"
}, },
"volta": { "volta": {
"extends": "../../package.json" "extends": "../../package.json"

View file

@ -0,0 +1,103 @@
import assert from 'node:assert';
import path from 'node:path';
import { before, after, describe, it } from 'node:test';
import type { MigrationMetadata } from '@emigrate/types';
import { startDatabase, stopDatabase } from './tests/database.js';
import { createMysqlStorage } from './index.js';
// Connection details for the database under test; assigned in `before`.
let db: { port: number; host: string };
// Storages created by the tests, collected so `after` can close them all.
const toEnd = new Set<{ end: () => Promise<void> }>();

describe('emigrate-mysql', async () => {
  // Starting the MySQL container can be slow, hence the generous timeout.
  before(
    async () => {
      db = await startDatabase();
    },
    { timeout: 60_000 },
  );

  after(
    async () => {
      // Close every storage opened during the tests before tearing down the database.
      for (const storage of toEnd) {
        // eslint-disable-next-line no-await-in-loop
        await storage.end();
      }

      toEnd.clear();

      await stopDatabase();
    },
    { timeout: 10_000 },
  );

  describe('migration locks', async () => {
    it('either locks none or all of the given migrations', async () => {
      const { initializeStorage } = createMysqlStorage({
        table: 'migrations',
        connection: {
          host: db.host,
          user: 'emigrate',
          password: 'emigrate',
          database: 'emigrate',
          port: db.port,
        },
      });

      // Two storages simulate two Emigrate instances running concurrently
      // against the same history table.
      const [storage1, storage2] = await Promise.all([initializeStorage(), initializeStorage()]);

      toEnd.add(storage1);
      toEnd.add(storage2);

      const migrations = toMigrations('/emigrate', 'migrations', [
        '2023-10-01-01-test.js',
        '2023-10-01-02-test.js',
        '2023-10-01-03-test.js',
        '2023-10-01-04-test.js',
        '2023-10-01-05-test.js',
        '2023-10-01-06-test.js',
        '2023-10-01-07-test.js',
        '2023-10-01-08-test.js',
        '2023-10-01-09-test.js',
        '2023-10-01-10-test.js',
        '2023-10-01-11-test.js',
        '2023-10-01-12-test.js',
        '2023-10-01-13-test.js',
        '2023-10-01-14-test.js',
        '2023-10-01-15-test.js',
        '2023-10-01-16-test.js',
        '2023-10-01-17-test.js',
        '2023-10-01-18-test.js',
        '2023-10-01-19-test.js',
        '2023-10-01-20-test.js',
      ]);

      // Race both instances for the same set of migrations: all-or-nothing
      // locking should give one instance every lock and the other none.
      const [locked1, locked2] = await Promise.all([storage1.lock(migrations), storage2.lock(migrations)]);

      assert.strictEqual(
        locked1.length === 0 || locked2.length === 0,
        true,
        'One of the processes should have no locks',
      );
      assert.strictEqual(
        locked1.length === 20 || locked2.length === 20,
        true,
        'One of the processes should have all locks',
      );
    });
  });
});
/**
 * Builds a MigrationMetadata object for a single migration file, deriving all
 * path-related fields from the working directory, directory and file name.
 */
function toMigration(cwd: string, directory: string, name: string): MigrationMetadata {
  const relativeFilePath = `${directory}/${name}`;
  const filePath = `${cwd}/${relativeFilePath}`;
  const extension = path.extname(name);

  return { name, filePath, relativeFilePath, extension, directory, cwd };
}

/**
 * Builds MigrationMetadata objects for each of the given migration file names.
 */
function toMigrations(cwd: string, directory: string, names: string[]): MigrationMetadata[] {
  const migrations: MigrationMetadata[] = [];

  for (const name of names) {
    migrations.push(toMigration(cwd, directory, name));
  }

  return migrations;
}

View file

@ -1,5 +1,6 @@
import process from 'node:process'; import process from 'node:process';
import fs from 'node:fs/promises'; import fs from 'node:fs/promises';
import { setTimeout } from 'node:timers/promises';
import { import {
createConnection, createConnection,
createPool, createPool,
@ -13,7 +14,9 @@ import {
} from 'mysql2/promise'; } from 'mysql2/promise';
import { getTimestampPrefix, sanitizeMigrationName } from '@emigrate/plugin-tools'; import { getTimestampPrefix, sanitizeMigrationName } from '@emigrate/plugin-tools';
import { import {
type Awaitable,
type MigrationMetadata, type MigrationMetadata,
type MigrationFunction,
type EmigrateStorage, type EmigrateStorage,
type LoaderPlugin, type LoaderPlugin,
type Storage, type Storage,
@ -41,27 +44,39 @@ export type MysqlLoaderOptions = {
connection: ConnectionOptions | string; connection: ConnectionOptions | string;
}; };
const getConnection = async (connection: ConnectionOptions | string) => { const getConnection = async (options: ConnectionOptions | string) => {
if (typeof connection === 'string') { let connection: Connection;
const uri = new URL(connection);
if (typeof options === 'string') {
const uri = new URL(options);
// client side connectTimeout is unstable in mysql2 library // client side connectTimeout is unstable in mysql2 library
// it throws an error you can't catch and crashes node // it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled) // best to leave this at 0 (disabled)
uri.searchParams.set('connectTimeout', '0'); uri.searchParams.set('connectTimeout', '0');
uri.searchParams.set('multipleStatements', 'true'); uri.searchParams.set('multipleStatements', 'true');
uri.searchParams.set('flags', '-FOUND_ROWS');
return createConnection(uri.toString()); connection = await createConnection(uri.toString());
} else {
connection = await createConnection({
...options,
// client side connectTimeout is unstable in mysql2 library
// it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled)
connectTimeout: 0,
multipleStatements: true,
flags: ['-FOUND_ROWS'],
});
} }
return createConnection({ if (process.isBun) {
...connection, // @ts-expect-error the connection is not in the types but it's there
// client side connectTimeout is unstable in mysql2 library // eslint-disable-next-line @typescript-eslint/no-unsafe-call
// it throws an error you can't catch and crashes node connection.connection.stream.unref();
// best to leave this at 0 (disabled) }
connectTimeout: 0,
multipleStatements: true, return connection;
});
}; };
const getPool = (connection: PoolOptions | string) => { const getPool = (connection: PoolOptions | string) => {
@ -72,6 +87,7 @@ const getPool = (connection: PoolOptions | string) => {
// it throws an error you can't catch and crashes node // it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled) // best to leave this at 0 (disabled)
uri.searchParams.set('connectTimeout', '0'); uri.searchParams.set('connectTimeout', '0');
uri.searchParams.set('flags', '-FOUND_ROWS');
return createPool(uri.toString()); return createPool(uri.toString());
} }
@ -82,6 +98,7 @@ const getPool = (connection: PoolOptions | string) => {
// it throws an error you can't catch and crashes node // it throws an error you can't catch and crashes node
// best to leave this at 0 (disabled) // best to leave this at 0 (disabled)
connectTimeout: 0, connectTimeout: 0,
flags: ['-FOUND_ROWS'],
}); });
}; };
@ -92,8 +109,8 @@ type HistoryEntry = {
error?: SerializedError; error?: SerializedError;
}; };
const lockMigration = async (pool: Pool, table: string, migration: MigrationMetadata) => { const lockMigration = async (connection: Connection, table: string, migration: MigrationMetadata) => {
const [result] = await pool.execute<ResultSetHeader>({ const [result] = await connection.execute<ResultSetHeader>({
sql: ` sql: `
INSERT INTO ${escapeId(table)} (name, status, date) INSERT INTO ${escapeId(table)} (name, status, date)
VALUES (?, ?, NOW()) VALUES (?, ?, NOW())
@ -216,8 +233,10 @@ const initializeDatabase = async (config: ConnectionOptions | string) => {
} }
}; };
const initializeTable = async (pool: Pool, table: string) => { const lockWaitTimeout = 10; // seconds
const [result] = await pool.execute<RowDataPacket[]>({
const isHistoryTableExisting = async (connection: Connection, table: string) => {
const [result] = await connection.execute<RowDataPacket[]>({
sql: ` sql: `
SELECT SELECT
1 as table_exists 1 as table_exists
@ -230,24 +249,70 @@ const initializeTable = async (pool: Pool, table: string) => {
values: [table], values: [table],
}); });
if (result[0]?.['table_exists']) { return result[0]?.['table_exists'] === 1;
};
const initializeTable = async (config: ConnectionOptions | string, table: string) => {
const connection = await getConnection(config);
if (await isHistoryTableExisting(connection, table)) {
await connection.end();
return; return;
} }
// This table definition is compatible with the one used by the immigration-mysql package const lockName = `emigrate_init_table_lock_${table}`;
await pool.execute(`
CREATE TABLE ${escapeId(table)} ( const [lockResult] = await connection.query<RowDataPacket[]>(`SELECT GET_LOCK(?, ?) AS got_lock`, [
name varchar(255) not null primary key, lockName,
status varchar(32), lockWaitTimeout,
date datetime not null ]);
) Engine=InnoDB; const didGetLock = lockResult[0]?.['got_lock'] === 1;
`);
if (didGetLock) {
try {
// This table definition is compatible with the one used by the immigration-mysql package
await connection.execute(`
CREATE TABLE IF NOT EXISTS ${escapeId(table)} (
name varchar(255) not null primary key,
status varchar(32),
date datetime not null
) Engine=InnoDB;
`);
} finally {
await connection.query(`SELECT RELEASE_LOCK(?)`, [lockName]);
await connection.end();
}
return;
}
// Didn't get the lock, wait to see if the table was created by another process
const maxWait = lockWaitTimeout * 1000; // milliseconds
const checkInterval = 250; // milliseconds
const start = Date.now();
try {
while (Date.now() - start < maxWait) {
// eslint-disable-next-line no-await-in-loop
if (await isHistoryTableExisting(connection, table)) {
return;
}
// eslint-disable-next-line no-await-in-loop
await setTimeout(checkInterval);
}
throw new Error(`Timeout waiting for table ${table} to be created by other process`);
} finally {
await connection.end();
}
}; };
export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlStorageOptions): EmigrateStorage => { export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlStorageOptions): EmigrateStorage => {
return { return {
async initializeStorage() { async initializeStorage() {
await initializeDatabase(connection); await initializeDatabase(connection);
await initializeTable(connection, table);
const pool = getPool(connection); const pool = getPool(connection);
@ -259,24 +324,35 @@ export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlSt
}); });
} }
try {
await initializeTable(pool, table);
} catch (error) {
await pool.end();
throw error;
}
const storage: Storage = { const storage: Storage = {
async lock(migrations) { async lock(migrations) {
const lockedMigrations: MigrationMetadata[] = []; const connection = await pool.getConnection();
for await (const migration of migrations) { try {
if (await lockMigration(pool, table, migration)) { await connection.beginTransaction();
lockedMigrations.push(migration); const lockedMigrations: MigrationMetadata[] = [];
for await (const migration of migrations) {
if (await lockMigration(connection, table, migration)) {
lockedMigrations.push(migration);
}
} }
}
return lockedMigrations; if (lockedMigrations.length === migrations.length) {
await connection.commit();
return lockedMigrations;
}
await connection.rollback();
return [];
} catch (error) {
await connection.rollback();
throw error;
} finally {
connection.release();
}
}, },
async unlock(migrations) { async unlock(migrations) {
for await (const migration of migrations) { for await (const migration of migrations) {
@ -335,17 +411,6 @@ export const createMysqlStorage = ({ table = defaultTable, connection }: MysqlSt
}; };
}; };
export const { initializeStorage } = createMysqlStorage({
table: process.env['MYSQL_TABLE'],
connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
user: process.env['MYSQL_USER'],
password: process.env['MYSQL_PASSWORD'],
database: process.env['MYSQL_DATABASE'],
},
});
export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlugin => { export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlugin => {
return { return {
loadableExtensions: ['.sql'], loadableExtensions: ['.sql'],
@ -354,12 +419,6 @@ export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlu
const contents = await fs.readFile(migration.filePath, 'utf8'); const contents = await fs.readFile(migration.filePath, 'utf8');
const conn = await getConnection(connection); const conn = await getConnection(connection);
if (process.isBun) {
// @ts-expect-error the connection is not in the types but it's there
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
conn.connection.stream.unref();
}
try { try {
await conn.query(contents); await conn.query(contents);
} finally { } finally {
@ -370,7 +429,16 @@ export const createMysqlLoader = ({ connection }: MysqlLoaderOptions): LoaderPlu
}; };
}; };
export const { loadableExtensions, loadMigration } = createMysqlLoader({ export const generateMigration: GenerateMigrationFunction = async (name) => {
return {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`,
content: `-- Migration: ${name}
`,
};
};
const storage = createMysqlStorage({
table: process.env['MYSQL_TABLE'],
connection: process.env['MYSQL_URL'] ?? { connection: process.env['MYSQL_URL'] ?? {
host: process.env['MYSQL_HOST'], host: process.env['MYSQL_HOST'],
port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined, port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
@ -380,13 +448,22 @@ export const { loadableExtensions, loadMigration } = createMysqlLoader({
}, },
}); });
export const generateMigration: GenerateMigrationFunction = async (name) => { const loader = createMysqlLoader({
return { connection: process.env['MYSQL_URL'] ?? {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`, host: process.env['MYSQL_HOST'],
content: `-- Migration: ${name} port: process.env['MYSQL_PORT'] ? Number.parseInt(process.env['MYSQL_PORT'], 10) : undefined,
`, user: process.env['MYSQL_USER'],
}; password: process.env['MYSQL_PASSWORD'],
}; database: process.env['MYSQL_DATABASE'],
},
});
// eslint-disable-next-line prefer-destructuring
export const initializeStorage: () => Promise<Storage> = storage.initializeStorage;
// eslint-disable-next-line prefer-destructuring
export const loadableExtensions: string[] = loader.loadableExtensions;
// eslint-disable-next-line prefer-destructuring
export const loadMigration: (migration: MigrationMetadata) => Awaitable<MigrationFunction> = loader.loadMigration;
const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = { const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = {
initializeStorage, initializeStorage,

View file

@ -0,0 +1,49 @@
/* eslint @typescript-eslint/naming-convention:0, import/no-extraneous-dependencies: 0 */
import process from 'node:process';
import { GenericContainer, type StartedTestContainer } from 'testcontainers';
let container: StartedTestContainer | undefined;
/**
 * Makes a MySQL instance available for the integration tests.
 *
 * In CI the database is expected to be provided externally and is located via
 * the MYSQL_PORT/MYSQL_HOST environment variables; locally a disposable
 * "mysql:8.2" container is started (and reused across calls) via testcontainers.
 *
 * @returns The host and port to connect to.
 */
export const startDatabase = async (): Promise<{ port: number; host: string }> => {
  if (process.env['CI']) {
    const mysqlPort = process.env['MYSQL_PORT'];
    const config = {
      port: mysqlPort ? Number.parseInt(mysqlPort, 10) : 3306,
      // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
      host: process.env['MYSQL_HOST'] || 'localhost',
    };

    console.log(`Connecting to MySQL from environment variables: ${JSON.stringify(config)}`);

    return config;
  }

  if (container === undefined) {
    console.log('Starting MySQL container...');

    container = await new GenericContainer('mysql:8.2')
      .withEnvironment({
        MYSQL_ROOT_PASSWORD: 'admin',
        MYSQL_USER: 'emigrate',
        MYSQL_PASSWORD: 'emigrate',
        MYSQL_DATABASE: 'emigrate',
      })
      .withTmpFs({ '/var/lib/mysql': 'rw' })
      .withCommand(['--sql-mode=NO_ENGINE_SUBSTITUTION', '--default-authentication-plugin=mysql_native_password'])
      .withExposedPorts(3306)
      .withReuse()
      .start();

    console.log('MySQL container started');
  }

  return { port: container.getMappedPort(3306), host: container.getHost() };
};
/**
 * Stops and forgets the MySQL test container, if one was started.
 * A no-op when the database came from the environment (e.g. in CI).
 */
export const stopDatabase = async (): Promise<void> => {
  if (!container) {
    return;
  }

  console.log('Stopping MySQL container...');
  await container.stop();
  console.log('MySQL container stopped');
  container = undefined;
};

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,13 @@
# @emigrate/plugin-generate-js # @emigrate/plugin-generate-js
## 0.3.8
### Patch Changes
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.7 ## 0.3.7
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/plugin-generate-js", "name": "@emigrate/plugin-generate-js",
"version": "0.3.7", "version": "0.3.8",
"publishConfig": { "publishConfig": {
"access": "public" "access": "public"
}, },

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,12 @@
# @emigrate/plugin-tools # @emigrate/plugin-tools
## 0.9.8
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- @emigrate/types@0.12.2
## 0.9.7 ## 0.9.7
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/plugin-tools", "name": "@emigrate/plugin-tools",
"version": "0.9.7", "version": "0.9.8",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true

View file

@ -204,7 +204,7 @@ const load = async <T>(
* *
* @returns A timestamp string in the format YYYYMMDDHHmmssmmm * @returns A timestamp string in the format YYYYMMDDHHmmssmmm
*/ */
export const getTimestampPrefix = () => new Date().toISOString().replaceAll(/[-:ZT.]/g, ''); export const getTimestampPrefix = (): string => new Date().toISOString().replaceAll(/[-:ZT.]/g, '');
/** /**
* A utility function to sanitize a migration name so that it can be used as a filename * A utility function to sanitize a migration name so that it can be used as a filename
@ -212,7 +212,7 @@ export const getTimestampPrefix = () => new Date().toISOString().replaceAll(/[-:
* @param name A migration name to sanitize * @param name A migration name to sanitize
* @returns A sanitized migration name that can be used as a filename * @returns A sanitized migration name that can be used as a filename
*/ */
export const sanitizeMigrationName = (name: string) => export const sanitizeMigrationName = (name: string): string =>
name name
.replaceAll(/[\W/\\:|*?'"<>_]+/g, '_') .replaceAll(/[\W/\\:|*?'"<>_]+/g, '_')
.trim() .trim()

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,14 @@
# @emigrate/postgres # @emigrate/postgres
## 0.3.2
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- Updated dependencies [d779286]
- @emigrate/plugin-tools@0.9.8
- @emigrate/types@0.12.2
## 0.3.1 ## 0.3.1
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/postgres", "name": "@emigrate/postgres",
"version": "0.3.1", "version": "0.3.2",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true

View file

@ -11,6 +11,8 @@ import {
type GeneratorPlugin, type GeneratorPlugin,
type SerializedError, type SerializedError,
type MigrationHistoryEntry, type MigrationHistoryEntry,
type Awaitable,
type MigrationFunction,
} from '@emigrate/types'; } from '@emigrate/types';
const defaultTable = 'migrations'; const defaultTable = 'migrations';
@ -255,17 +257,6 @@ export const createPostgresStorage = ({
}; };
}; };
export const { initializeStorage } = createPostgresStorage({
table: process.env['POSTGRES_TABLE'],
connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
user: process.env['POSTGRES_USER'],
password: process.env['POSTGRES_PASSWORD'],
database: process.env['POSTGRES_DB'],
},
});
export const createPostgresLoader = ({ connection }: PostgresLoaderOptions): LoaderPlugin => { export const createPostgresLoader = ({ connection }: PostgresLoaderOptions): LoaderPlugin => {
return { return {
loadableExtensions: ['.sql'], loadableExtensions: ['.sql'],
@ -284,7 +275,16 @@ export const createPostgresLoader = ({ connection }: PostgresLoaderOptions): Loa
}; };
}; };
export const { loadableExtensions, loadMigration } = createPostgresLoader({ export const generateMigration: GenerateMigrationFunction = async (name) => {
return {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`,
content: `-- Migration: ${name}
`,
};
};
const storage = createPostgresStorage({
table: process.env['POSTGRES_TABLE'],
connection: process.env['POSTGRES_URL'] ?? { connection: process.env['POSTGRES_URL'] ?? {
host: process.env['POSTGRES_HOST'], host: process.env['POSTGRES_HOST'],
port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined, port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
@ -294,13 +294,22 @@ export const { loadableExtensions, loadMigration } = createPostgresLoader({
}, },
}); });
export const generateMigration: GenerateMigrationFunction = async (name) => { const loader = createPostgresLoader({
return { connection: process.env['POSTGRES_URL'] ?? {
filename: `${getTimestampPrefix()}_${sanitizeMigrationName(name)}.sql`, host: process.env['POSTGRES_HOST'],
content: `-- Migration: ${name} port: process.env['POSTGRES_PORT'] ? Number.parseInt(process.env['POSTGRES_PORT'], 10) : undefined,
`, user: process.env['POSTGRES_USER'],
}; password: process.env['POSTGRES_PASSWORD'],
}; database: process.env['POSTGRES_DB'],
},
});
// eslint-disable-next-line prefer-destructuring
export const initializeStorage: () => Promise<Storage> = storage.initializeStorage;
// eslint-disable-next-line prefer-destructuring
export const loadableExtensions: string[] = loader.loadableExtensions;
// eslint-disable-next-line prefer-destructuring
export const loadMigration: (migration: MigrationMetadata) => Awaitable<MigrationFunction> = loader.loadMigration;
const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = { const defaultExport: EmigrateStorage & LoaderPlugin & GeneratorPlugin = {
initializeStorage, initializeStorage,

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,12 @@
# @emigrate/reporter-pino # @emigrate/reporter-pino
## 0.6.5
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
- @emigrate/types@0.12.2
## 0.6.4 ## 0.6.4
### Patch Changes ### Patch Changes

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/reporter-pino", "name": "@emigrate/reporter-pino",
"version": "0.6.4", "version": "0.6.5",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true

View file

@ -204,6 +204,8 @@ export const createPinoReporter = (options: PinoReporterOptions = {}): EmigrateR
return new PinoReporter(options); return new PinoReporter(options);
}; };
export default createPinoReporter({ const defaultExport: EmigrateReporter = createPinoReporter({
level: process.env['LOG_LEVEL'], level: process.env['LOG_LEVEL'],
}); });
export default defaultExport;

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

View file

@ -1,5 +1,11 @@
# @emigrate/tsconfig # @emigrate/tsconfig
## 1.0.3
### Patch Changes
- d779286: Upgrade TypeScript to v5.5 and enable [isolatedDeclarations](https://devblogs.microsoft.com/typescript/announcing-typescript-5-5/#isolated-declarations)
## 1.0.2 ## 1.0.2
### Patch Changes ### Patch Changes

View file

@ -11,6 +11,7 @@
"forceConsistentCasingInFileNames": true, "forceConsistentCasingInFileNames": true,
"inlineSources": false, "inlineSources": false,
"isolatedModules": true, "isolatedModules": true,
"isolatedDeclarations": true,
"incremental": true, "incremental": true,
"module": "NodeNext", "module": "NodeNext",
"moduleResolution": "NodeNext", "moduleResolution": "NodeNext",
@ -31,5 +32,7 @@
"strict": true, "strict": true,
"target": "ES2022", "target": "ES2022",
"lib": ["ESNext", "DOM", "DOM.Iterable"] "lib": ["ESNext", "DOM", "DOM.Iterable"]
} },
"include": ["${configDir}/src"],
"exclude": ["${configDir}/dist"]
} }

View file

@ -3,6 +3,7 @@
"display": "Build", "display": "Build",
"extends": "./base.json", "extends": "./base.json",
"compilerOptions": { "compilerOptions": {
"noEmit": false "noEmit": false,
"outDir": "${configDir}/dist"
} }
} }

View file

@ -1,6 +1,6 @@
{ {
"name": "@emigrate/tsconfig", "name": "@emigrate/tsconfig",
"version": "1.0.2", "version": "1.0.3",
"publishConfig": { "publishConfig": {
"access": "public", "access": "public",
"provenance": true "provenance": true

View file

@ -1,8 +1,3 @@
{ {
"extends": "@emigrate/tsconfig/build.json", "extends": "@emigrate/tsconfig/build.json"
"compilerOptions": {
"outDir": "dist"
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
} }

11849
pnpm-lock.yaml generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,7 @@
{ {
"$schema": "https://turborepo.org/schema.json", "$schema": "https://turborepo.org/schema.json",
"pipeline": { "ui": "stream",
"tasks": {
"build": { "build": {
"dependsOn": ["^build"], "dependsOn": ["^build"],
"inputs": ["src/**/*", "!src/**/*.test.ts", "tsconfig.json", "tsconfig.build.json"], "inputs": ["src/**/*", "!src/**/*.test.ts", "tsconfig.json", "tsconfig.build.json"],