7 changes: 7 additions & 0 deletions .changeset/support-migrations.md
@@ -0,0 +1,7 @@
---
"wrangler": patch
---

Support migration directories (multi-file migrations) for D1.

A migration can now be either a single top-level `.sql` file or a directory, named after the migration, that contains one or more `.sql` files executed in a deterministic order. Existing single-file migrations continue to work unchanged.
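For example, a hypothetical migrations directory mixing both styles (file and directory names are illustrative) might look like:

```
migrations/
├── 0001_create_users.sql
└── 0002_seed_data/
    ├── 01_schema.sql
    └── 02_seed.sql
```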
197 changes: 197 additions & 0 deletions packages/wrangler/src/__tests__/d1/migrate.test.ts
@@ -1,3 +1,4 @@
import fs from "node:fs";
import { http, HttpResponse } from "msw";
import { reinitialiseAuthTokens } from "../../user";
import { mockAccountId, mockApiToken } from "../helpers/mock-account-id";
@@ -189,6 +190,202 @@ Your database may not be available to serve requests during the migration, continue?
await runWrangler("d1 migrations apply db --remote");
expect(std.out).toBe("");
});

it("applies migrations from a migration directory containing .sql files", async () => {
setIsTTY(false);
const commands: string[] = [];

msw.use(
http.post(
"*/accounts/:accountId/d1/database/:databaseId/query",
async ({ request }) => {
// Record the raw request body so the assertions below can check the SQL it contains.
commands.push(await request.text());

return HttpResponse.json(
{
result: [
{
results: [],
success: true,
meta: {},
},
],
success: true,
errors: [],
messages: [],
},
{ status: 200 }
);
}
)
);

msw.use(
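// Stub the database metadata lookup made during `d1 migrations apply`.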
http.get("*/accounts/:accountId/d1/database/:databaseId", async () => {
return HttpResponse.json(
{
result: {
file_size: 123,
name: "testdb",
num_tables: 0,
uuid: "uuid",
version: "production",
},
success: true,
errors: [],
messages: [],
},
{ status: 200 }
);
})
);

writeWranglerConfig({
d1_databases: [
{
binding: "DATABASE",
database_name: "db",
database_id: "xxxx",
migrations_dir: "migrations",
},
],
account_id: "nx01",
});

mockGetMemberships([
{ id: "IG-88", account: { id: "1701", name: "enterprise" } },
{ id: "R2-D2", account: { id: "nx01", name: "enterprise-nx" } },
]);

// Create migration directory with two sql files
fs.mkdirSync("migrations/001_complex", { recursive: true });
fs.writeFileSync(
"migrations/001_complex/01_schema.sql",
"CREATE TABLE users (id INTEGER PRIMARY KEY);"
);
fs.writeFileSync(
"migrations/001_complex/02_seed.sql",
"INSERT INTO users (id) VALUES (1);"
);

mockConfirm({
text: `About to apply 1 migration(s)\nYour database may not be available to serve requests during the migration, continue?`,
result: true,
});

await runWrangler("d1 migrations apply db --remote");

expect(commands.some((c) => c.includes("CREATE TABLE users"))).toBe(true);
expect(commands.some((c) => c.includes("INSERT INTO users"))).toBe(true);
expect(commands.some((c) => c.includes("INSERT INTO migrations"))).toBe(
true
);
});

it("applies a mix of top-level .sql and directory migrations", async () => {
setIsTTY(false);
const commands: string[] = [];

msw.use(
http.post(
"*/accounts/:accountId/d1/database/:databaseId/query",
async ({ request }) => {
// Record the raw request body so the assertions below can check the SQL it contains.
commands.push(await request.text());

return HttpResponse.json(
{
result: [
{
results: [],
success: true,
meta: {},
},
],
success: true,
errors: [],
messages: [],
},
{ status: 200 }
);
}
)
);

msw.use(
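// Stub the database metadata lookup made during `d1 migrations apply`.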
http.get("*/accounts/:accountId/d1/database/:databaseId", async () => {
return HttpResponse.json(
{
result: {
file_size: 123,
name: "testdb",
num_tables: 0,
uuid: "uuid",
version: "production",
},
success: true,
errors: [],
messages: [],
},
{ status: 200 }
);
})
);

writeWranglerConfig({
d1_databases: [
{
binding: "DATABASE",
database_name: "db",
database_id: "xxxx",
migrations_dir: "migrations",
},
],
account_id: "nx01",
});

mockGetMemberships([
{ id: "IG-88", account: { id: "1701", name: "enterprise" } },
{ id: "R2-D2", account: { id: "nx01", name: "enterprise-nx" } },
]);

// Create top-level migration file and a migration directory
fs.mkdirSync("migrations/002_complex", { recursive: true });
fs.writeFileSync(
"migrations/001_init.sql",
"CREATE TABLE top (id INTEGER PRIMARY KEY);"
);
fs.writeFileSync(
"migrations/002_complex/01_schema.sql",
"CREATE TABLE users (id INTEGER PRIMARY KEY);"
);
fs.writeFileSync(
"migrations/002_complex/02_seed.sql",
"INSERT INTO users (id) VALUES (1);"
);

mockConfirm({
text: `About to apply 2 migration(s)\nYour database may not be available to serve requests during the migration, continue?`,
result: true,
});

await runWrangler("d1 migrations apply db --remote");

expect(commands.some((c) => c.includes("CREATE TABLE top"))).toBe(true);
expect(commands.some((c) => c.includes("CREATE TABLE users"))).toBe(true);
// Each migration should record its own name in the tracking table.
expect(commands.some((c) => c.includes("values ('001_init')"))).toBe(true);
expect(
commands.some((c) => c.includes("values ('002_complex')"))
).toBe(true);
});
});

describe("list", () => {
35 changes: 30 additions & 5 deletions packages/wrangler/src/d1/migrations/apply.ts
@@ -1,5 +1,5 @@
import fs from "node:fs";
import path from "path";
import path from "node:path";
import { configFileName } from "../../config";
import { createCommand } from "../../core/create-command";
import { confirm } from "../../dialogs";
@@ -132,13 +132,19 @@ Your database may not be available to serve requests during the migration, continue?
}

for (const migration of unappliedMigrations) {
let query = fs.readFileSync(
`${migrationsPath}/${migration.name}`,
"utf8"
const migrationFiles = resolveMigrationFiles(
migrationsPath,
migration.name
);
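// A directory migration may contain several files; concatenate them into a single batch.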
let query = migrationFiles
.map((file) => fs.readFileSync(file, "utf8"))
.join("\n\n");

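// Escape single quotes so the migration name is safe to embed in the tracking INSERT below.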
const safeMigrationName = migration.name.replace(/'/g, "''");

query += `
INSERT INTO ${migrationsTableName} (name)
values ('${migration.name}');
values ('${safeMigrationName}');
`;

let success = true;
@@ -209,3 +215,22 @@ Your database may not be available to serve requests during the migration, continue?
}
},
});

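/**
 * Resolves a migration to the `.sql` files that make it up: the file itself for a
 * single-file migration, or every `.sql` file inside a migration directory.
 */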
function resolveMigrationFiles(migrationsPath: string, migrationName: string) {
const fullPath = path.join(migrationsPath, migrationName);

if (fs.existsSync(fullPath)) {
if (fs.statSync(fullPath).isFile()) {
return [fullPath];
} else if (fs.statSync(fullPath).isDirectory()) {
// Sort so that the files in a directory migration run in a deterministic order.
const files = fs.readdirSync(fullPath);
return files
.filter((f) => f.endsWith(".sql"))
.sort()
.map((f) => path.join(fullPath, f));
}
}

throw new UserError(
`Migration path ${fullPath} is neither a file nor a directory.`
);
}
19 changes: 15 additions & 4 deletions packages/wrangler/src/d1/migrations/helpers.ts
@@ -1,5 +1,5 @@
import fs from "node:fs";
import path from "path";
import path from "node:path";
import { configFileName } from "../../config";
import { confirm } from "../../dialogs";
import { UserError } from "../../errors";
@@ -129,15 +129,26 @@ const listAppliedMigrations = async ({
return response[0].results as Migration[];
};

function getMigrationNames(migrationsPath: string): Array<string> {
const migrations = [];
/*
* Returns the names of all migrations in the given directory.
* A migration is either a .sql file or a directory containing .sql files.
*/
function getMigrationNames(migrationsPath: string): string[] {
const migrations: string[] = [];

const dir = fs.opendirSync(migrationsPath);

const hasSqlFile = (f: string) => f.endsWith(".sql");

let dirent;
while ((dirent = dir.readSync()) !== null) {
if (dirent.name.endsWith(".sql")) {
if (dirent.isFile() && hasSqlFile(dirent.name)) {
migrations.push(dirent.name);
} else if (dirent.isDirectory()) {
// A non-recursive check for .sql files in the directory
const sub = fs.readdirSync(path.join(migrationsPath, dirent.name));
const hasSql = sub.some(hasSqlFile);
if (hasSql) migrations.push(dirent.name);
}
}
