Docker Compose Database Setup (#379)

Co-authored-by: Aman Varshney <amanvarshney.work@gmail.com>
Aidan Sunbury
2025-07-10 09:54:52 -07:00
committed by GitHub
parent 1f2f150f17
commit 3569b04ac4
27 changed files with 479 additions and 140042 deletions

View File

@@ -0,0 +1,51 @@
import path from "node:path";
import type { Database, ProjectConfig } from "../../types";
import {
  addEnvVariablesToFile,
  type EnvVariable,
} from "../project-generation/env-setup";

export async function setupDockerCompose(config: ProjectConfig): Promise<void> {
  const { database, projectDir, projectName } = config;

  if (database === "none" || database === "sqlite") {
    return;
  }

  try {
    await writeEnvFile(projectDir, database, projectName);
  } catch (error) {
    if (error instanceof Error) {
      console.error(`Error: ${error.message}`);
    }
  }
}

async function writeEnvFile(
  projectDir: string,
  database: Database,
  projectName: string,
) {
  const envPath = path.join(projectDir, "apps/server", ".env");
  const variables: EnvVariable[] = [
    {
      key: "DATABASE_URL",
      value: getDatabaseUrl(database, projectName),
      condition: true,
    },
  ];
  await addEnvVariablesToFile(envPath, variables);
}

function getDatabaseUrl(database: Database, projectName: string): string {
  switch (database) {
    case "postgres":
      return `postgresql://postgres:password@localhost:5432/${projectName}`;
    case "mysql":
      return `mysql://user:password@localhost:3306/${projectName}`;
    case "mongodb":
      return `mongodb://root:password@localhost:27017/${projectName}?authSource=admin`;
    default:
      return "";
  }
}
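For reference, the DATABASE_URL value this helper writes to apps/server/.env is fully determined by the switch above. For a project named "my-app" the results come out as follows (illustrative only; it is assumed the bundled docker-compose templates use these default credentials and ports):

getDatabaseUrl("postgres", "my-app");
// -> "postgresql://postgres:password@localhost:5432/my-app"
getDatabaseUrl("mysql", "my-app");
// -> "mysql://user:password@localhost:3306/my-app"
getDatabaseUrl("mongodb", "my-app");
// -> "mongodb://root:password@localhost:27017/my-app?authSource=admin"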

View File

@@ -28,6 +28,7 @@ import {
  setupBackendFramework,
  setupDbOrmTemplates,
  setupDeploymentTemplates,
  setupDockerComposeTemplates,
  setupExamplesTemplate,
  setupFrontendTemplates,
} from "./template-manager";
@@ -44,6 +45,7 @@ export async function createProject(options: ProjectConfig) {
  await setupBackendFramework(projectDir, options);
  if (!isConvex) {
    await setupDbOrmTemplates(projectDir, options);
    await setupDockerComposeTemplates(projectDir, options);
    await setupAuthTemplate(projectDir, options);
  }
  if (options.examples.length > 0 && options.examples[0] !== "none") {
@@ -94,7 +96,7 @@ export async function createProject(options: ProjectConfig) {
  await initializeGit(projectDir, options.git);
  displayPostInstallInstructions({
  await displayPostInstallInstructions({
    ...options,
    depsInstalled: options.install,
  });

View File

@@ -187,7 +187,8 @@ export async function setupEnvironmentVariables(
dbSetup === "mongodb-atlas" ||
dbSetup === "neon" ||
dbSetup === "supabase" ||
dbSetup === "d1";
dbSetup === "d1" ||
dbSetup === "docker";
if (database !== "none" && !specializedSetup) {
switch (database) {

View File

@@ -7,9 +7,10 @@ import type {
  ProjectConfig,
  Runtime,
} from "../../types";
import { getDockerStatus } from "../../utils/docker-utils";
import { getPackageExecutionCommand } from "../../utils/package-runner";
export function displayPostInstallInstructions(
export async function displayPostInstallInstructions(
  config: ProjectConfig & { depsInstalled: boolean },
) {
  const {
@@ -34,7 +35,7 @@ export function displayPostInstallInstructions(
  const databaseInstructions =
    !isConvex && database !== "none"
      ? getDatabaseInstructions(database, orm, runCmd, runtime, dbSetup)
      ? await getDatabaseInstructions(database, orm, runCmd, runtime, dbSetup)
      : "";
  const tauriInstructions = addons?.includes("tauri")
@@ -193,15 +194,24 @@ function getLintingInstructions(runCmd?: string): string {
  )} Format and lint fix: ${`${runCmd} check`}\n`;
}
function getDatabaseInstructions(
async function getDatabaseInstructions(
  database: Database,
  orm?: ORM,
  runCmd?: string,
  runtime?: Runtime,
  dbSetup?: DatabaseSetup,
): string {
): Promise<string> {
  const instructions = [];
  if (dbSetup === "docker") {
    const dockerStatus = await getDockerStatus(database);
    if (dockerStatus.message) {
      instructions.push(dockerStatus.message);
      instructions.push("");
    }
  }
  if (runtime === "workers" && dbSetup === "d1") {
    const packageManager = runCmd === "npm run" ? "npm" : runCmd || "npm";
@@ -255,10 +265,26 @@ function getDatabaseInstructions(
        )} Prisma with Bun may require additional configuration. If you encounter errors,\nfollow the guidance provided in the error messages`,
      );
    }
    if (database === "mongodb" && dbSetup === "docker") {
      instructions.push(
        `${pc.yellow(
          "WARNING:",
        )} Prisma + MongoDB + Docker combination may not work.`,
      );
    }
    if (dbSetup === "docker") {
      instructions.push(
        `${pc.cyan("•")} Start docker container: ${`${runCmd} db:start`}`,
      );
    }
    instructions.push(`${pc.cyan("•")} Apply schema: ${`${runCmd} db:push`}`);
    instructions.push(`${pc.cyan("•")} Database UI: ${`${runCmd} db:studio`}`);
  } else if (orm === "drizzle") {
    if (dbSetup === "docker") {
      instructions.push(
        `${pc.cyan("•")} Start docker container: ${`${runCmd} db:start`}`,
      );
    }
    instructions.push(`${pc.cyan("•")} Apply schema: ${`${runCmd} db:push`}`);
    instructions.push(`${pc.cyan("•")} Database UI: ${`${runCmd} db:studio`}`);
    if (database === "sqlite" && dbSetup !== "d1") {
@@ -268,6 +294,12 @@ function getDatabaseInstructions(
        )} Start local DB (if needed): ${`cd apps/server && ${runCmd} db:local`}`,
      );
    }
  } else if (orm === "mongoose") {
    if (dbSetup === "docker") {
      instructions.push(
        `${pc.cyan("•")} Start docker container: ${`${runCmd} db:start`}`,
      );
    }
  } else if (orm === "none") {
    instructions.push(
      `${pc.yellow("NOTE:")} Manual database schema setup required.`,

View File

@@ -80,6 +80,12 @@ async function updateRootPackageJson(
scripts["db:migrate"] = `turbo -F ${backendPackageName} db:migrate`;
}
}
if (options.dbSetup === "docker") {
scripts["db:start"] = `turbo -F ${backendPackageName} db:start`;
scripts["db:watch"] = `turbo -F ${backendPackageName} db:watch`;
scripts["db:stop"] = `turbo -F ${backendPackageName} db:stop`;
scripts["db:down"] = `turbo -F ${backendPackageName} db:down`;
}
} else if (options.packageManager === "pnpm") {
scripts.dev = devScript;
scripts.build = "pnpm -r build";
@@ -105,6 +111,12 @@ async function updateRootPackageJson(
`pnpm --filter ${backendPackageName} db:migrate`;
}
}
if (options.dbSetup === "docker") {
scripts["db:start"] = `pnpm --filter ${backendPackageName} db:start`;
scripts["db:watch"] = `pnpm --filter ${backendPackageName} db:watch`;
scripts["db:stop"] = `pnpm --filter ${backendPackageName} db:stop`;
scripts["db:down"] = `pnpm --filter ${backendPackageName} db:down`;
}
} else if (options.packageManager === "npm") {
scripts.dev = devScript;
scripts.build = "npm run build --workspaces";
@@ -132,6 +144,14 @@ async function updateRootPackageJson(
`npm run db:migrate --workspace ${backendPackageName}`;
}
}
if (options.dbSetup === "docker") {
scripts["db:start"] =
`npm run db:start --workspace ${backendPackageName}`;
scripts["db:watch"] =
`npm run db:watch --workspace ${backendPackageName}`;
scripts["db:stop"] = `npm run db:stop --workspace ${backendPackageName}`;
scripts["db:down"] = `npm run db:down --workspace ${backendPackageName}`;
}
} else if (options.packageManager === "bun") {
scripts.dev = devScript;
scripts.build = "bun run --filter '*' build";
@@ -157,6 +177,12 @@ async function updateRootPackageJson(
`bun run --filter ${backendPackageName} db:migrate`;
}
}
if (options.dbSetup === "docker") {
scripts["db:start"] = `bun run --filter ${backendPackageName} db:start`;
scripts["db:watch"] = `bun run --filter ${backendPackageName} db:watch`;
scripts["db:stop"] = `bun run --filter ${backendPackageName} db:stop`;
scripts["db:down"] = `bun run --filter ${backendPackageName} db:down`;
}
}
if (options.addons.includes("biome")) {
@@ -246,6 +272,13 @@ async function updateServerPackageJson(
}
}
if (options.dbSetup === "docker") {
scripts["db:start"] = "docker compose up -d";
scripts["db:watch"] = "docker compose up";
scripts["db:stop"] = "docker compose stop";
scripts["db:down"] = "docker compose down";
}
await fs.writeJson(serverPackageJsonPath, serverPackageJson, {
spaces: 2,
});
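Taken together, with dbSetup set to "docker" the root package.json scripts delegate to the server package, whose scripts drive Docker Compose directly. For the pnpm case, for example, the relevant entries end up roughly as follows (paraphrased from the additions above; "server" stands in for the actual backend package name):

// Roughly what the added script entries amount to (pnpm example; names illustrative).
const rootScripts = {
  "db:start": "pnpm --filter server db:start",
  "db:watch": "pnpm --filter server db:watch",
  "db:stop": "pnpm --filter server db:stop",
  "db:down": "pnpm --filter server db:down",
};
const serverScripts = {
  "db:start": "docker compose up -d", // start the database container detached
  "db:watch": "docker compose up", // run in the foreground with logs attached
  "db:stop": "docker compose stop", // stop the container without removing it
  "db:down": "docker compose down", // stop and remove the container
};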

View File

@@ -833,6 +833,26 @@ export async function handleExtras(
  }
}
export async function setupDockerComposeTemplates(
  projectDir: string,
  context: ProjectConfig,
): Promise<void> {
  if (context.dbSetup !== "docker" || context.database === "none") {
    return;
  }
  const serverAppDir = path.join(projectDir, "apps/server");
  const dockerSrcDir = path.join(
    PKG_ROOT,
    `templates/db-setup/docker-compose/${context.database}`,
  );
  if (await fs.pathExists(dockerSrcDir)) {
    await processAndCopyFiles("**/*", dockerSrcDir, serverAppDir, context);
  } else {
  }
}
export async function setupDeploymentTemplates(
  projectDir: string,
  context: ProjectConfig,

View File

@@ -6,6 +6,7 @@ import pc from "picocolors";
import type { ProjectConfig } from "../../types";
import { addPackageDependency } from "../../utils/add-package-deps";
import { setupCloudflareD1 } from "../database-providers/d1-setup";
import { setupDockerCompose } from "../database-providers/docker-compose-setup";
import { setupMongoDBAtlas } from "../database-providers/mongodb-atlas-setup";
import { setupNeonPostgres } from "../database-providers/neon-setup";
import { setupPrismaPostgres } from "../database-providers/prisma-postgres-setup";
@@ -76,7 +77,9 @@ export async function setupDatabase(config: ProjectConfig): Promise<void> {
    });
  }
  if (database === "sqlite" && dbSetup === "turso") {
  if (dbSetup === "docker") {
    await setupDockerCompose(config);
  } else if (database === "sqlite" && dbSetup === "turso") {
    await setupTurso(config);
  } else if (database === "sqlite" && dbSetup === "d1") {
    await setupCloudflareD1(config);