mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 23:21:08 +02:00
Validator now checks 52 Dockerfiles (web + backend + service). Fixed 10 missing COPYs across backends, services, and nestjs-base. Generator also supports backend/service Dockerfiles with markers. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
512 lines
16 KiB
JavaScript
Executable file
512 lines
16 KiB
JavaScript
Executable file
#!/usr/bin/env node
|
|
|
|
// Generate COPY statements in Dockerfiles from package.json workspace dependencies.
|
|
//
|
|
// Processes:
|
|
// - apps/{name}/apps/web/Dockerfile (web apps)
|
|
// - apps/{name}/apps/backend/Dockerfile (app backends, only if markers exist)
|
|
// - services/{name}/Dockerfile (services, only if markers exist)
|
|
//
|
|
// For each Dockerfile, reads the corresponding package.json,
|
|
// resolves workspace dependencies to their directory paths, and updates the
|
|
// COPY block between marker comments.
|
|
//
|
|
// Usage:
|
|
// node scripts/generate-dockerfiles.mjs # Update all Dockerfiles
|
|
// node scripts/generate-dockerfiles.mjs --check # Check only, exit 1 if changes needed
|
|
|
|
import { readFileSync, writeFileSync, readdirSync, existsSync } from 'node:fs';
|
|
import { join, resolve } from 'node:path';
|
|
|
|
// Repository root, resolved relative to this script's own directory
// (the script lives one level below the root, e.g. scripts/).
const ROOT = resolve(import.meta.dirname, '..');

// Marker comments that delimit the auto-generated COPY block inside each
// Dockerfile; everything between them is owned by this generator.
const START_MARKER = '# --- AUTO-GENERATED COPY STATEMENTS (do not edit manually) ---';

const END_MARKER = '# --- END AUTO-GENERATED ---';

// --check mode: report which Dockerfiles are stale and exit 1 instead of
// rewriting them in place.
const isCheck = process.argv.includes('--check');
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Package map: package name -> directory path relative to repo root
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Scan `dir` for immediate subdirectories that contain a package.json and
 * register each package's name in `map`, mapped to `prefix/<subdir>`.
 * Malformed or unreadable package.json files are skipped (best-effort),
 * matching the original silent-skip behavior.
 *
 * @param {Map<string, string>} map - Accumulator: package name -> repo-relative dir.
 * @param {string} dir - Absolute directory to scan (may not exist).
 * @param {string} prefix - Repo-relative prefix for entries found in `dir`.
 */
function registerPackages(map, dir, prefix) {
  if (!existsSync(dir)) return;
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    if (!entry.isDirectory()) continue;
    const pkgJsonPath = join(dir, entry.name, 'package.json');
    if (!existsSync(pkgJsonPath)) continue;
    try {
      const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf8'));
      map.set(pkg.name, `${prefix}/${entry.name}`);
    } catch {
      // Unparseable package.json: the package simply won't be mapped.
    }
  }
}

/**
 * Build a map of workspace package name -> directory path relative to the
 * repo root. Covers three locations:
 *   - packages/*            (root-level shared packages)
 *   - apps/* /packages/*     (app-local packages)
 *   - services/* /packages/*  (service-local packages)
 *
 * @returns {Map<string, string>}
 */
function buildPackageMap() {
  const map = new Map();

  // Root-level shared packages: packages/*
  registerPackages(map, join(ROOT, 'packages'), 'packages');

  // App-local packages: apps/*/packages/*
  const appsDir = join(ROOT, 'apps');
  if (existsSync(appsDir)) {
    for (const appEntry of readdirSync(appsDir, { withFileTypes: true })) {
      if (!appEntry.isDirectory()) continue;
      registerPackages(
        map,
        join(appsDir, appEntry.name, 'packages'),
        `apps/${appEntry.name}/packages`
      );
    }
  }

  // Service-local packages: services/*/packages/*
  const servicesDir = join(ROOT, 'services');
  if (existsSync(servicesDir)) {
    for (const svcEntry of readdirSync(servicesDir, { withFileTypes: true })) {
      if (!svcEntry.isDirectory()) continue;
      registerPackages(
        map,
        join(servicesDir, svcEntry.name, 'packages'),
        `services/${svcEntry.name}/packages`
      );
    }
  }

  return map;
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Extract workspace deps from package.json
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Read the package.json at `pkgJsonPath` and return the names of all
 * dependencies declared with the `workspace:` protocol, scanning both
 * `dependencies` and `devDependencies` in that order.
 *
 * @param {string} pkgJsonPath - Absolute path to a package.json file.
 * @returns {string[]} Workspace dependency names, in declaration order.
 */
function getWorkspaceDeps(pkgJsonPath) {
  const pkg = JSON.parse(readFileSync(pkgJsonPath, 'utf8'));
  return ['dependencies', 'devDependencies'].flatMap((section) =>
    Object.entries(pkg[section] ?? {})
      .filter(([, spec]) => typeof spec === 'string' && spec.startsWith('workspace:'))
      .map(([depName]) => depName)
  );
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Generate the COPY block content (lines between the markers)
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Build the auto-generated COPY block (the lines that go between the marker
 * comments) for one Dockerfile.
 *
 * Output order: the patches/ COPY first, then known workspace package dirs
 * sorted alphabetically within each group — root `packages/` first, then
 * `apps/`, then `services/` — and finally (if any dependency could not be
 * resolved) a blank line plus a WARNING comment listing the unknown names.
 *
 * @param {string[]} workspaceDeps - Workspace dependency package names.
 * @param {Map<string, string>} packageMap - Package name -> repo-relative dir.
 * @returns {string[]} The generated block lines.
 */
function generateCopyBlock(workspaceDeps, packageMap) {
  // Patches are always copied so the install step can apply them.
  const blockLines = ['COPY patches/ ./patches/'];

  const unknown = workspaceDeps.filter((dep) => !packageMap.has(dep));
  const known = workspaceDeps
    .filter((dep) => packageMap.has(dep))
    .map((dep) => packageMap.get(dep))
    .sort();

  // Root packages first, then app-specific packages, then service packages.
  for (const prefix of ['packages/', 'apps/', 'services/']) {
    for (const dirPath of known.filter((p) => p.startsWith(prefix))) {
      blockLines.push(`COPY ${dirPath} ./${dirPath}`);
    }
  }

  if (unknown.length > 0) {
    blockLines.push('');
    blockLines.push(
      `# WARNING: Unknown workspace deps (not found in workspace): ${unknown.join(', ')}`
    );
  }

  return blockLines;
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Process a Dockerfile: insert/update markers and generated COPY block.
|
|
// Returns the updated content string.
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Rewrite one Dockerfile's generated COPY block and return the updated
 * content string.
 *
 * If the file already contains the start marker the block between the
 * markers is replaced (and duplicate COPYs elsewhere are cleaned up);
 * otherwise a fresh marker block is inserted at the appropriate spot.
 *
 * @param {string} content - Full Dockerfile text.
 * @param {string} appName - App/service name used to recognize app-local COPYs.
 * @param {string[]} copyLines - Generated block lines (from generateCopyBlock).
 * @returns {string} Updated Dockerfile content.
 */
function processDockerfile(content, appName, copyLines) {
  const lines = content.split('\n');

  // Source paths of every generated COPY line, used downstream to detect
  // and drop duplicate COPY statements left over in the file.
  const generatedPaths = new Set(
    copyLines
      .map((line) => line.match(/^COPY\s+(\S+)/))
      .filter(Boolean)
      .map((m) => m[1])
  );

  return content.includes(START_MARKER)
    ? updateExistingMarkers(lines, copyLines, generatedPaths)
    : insertMarkersAndBlock(lines, copyLines, generatedPaths, appName);
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Update content that already has markers
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Replace the content between the existing START/END markers with the
 * freshly generated `copyLines`, then sweep the remainder of the file,
 * deleting COPY statements that duplicate (or are subsumed by) the
 * generated block and comments orphaned by those deletions.
 *
 * NOTE(review): if START_MARKER exists but END_MARKER is missing, the
 * skip loop below consumes every remaining line, so all content after the
 * start marker would be dropped — presumably markers always come in pairs;
 * TODO confirm.
 *
 * @param {string[]} lines - Dockerfile content split on '\n'.
 * @param {string[]} copyLines - Generated block lines to insert.
 * @param {Set<string>} generatedPaths - COPY source paths in the generated block.
 * @returns {string} Updated Dockerfile content.
 */
function updateExistingMarkers(lines, copyLines, generatedPaths) {
  const result = [];
  let i = 0;

  // Copy lines up to and including the start marker.
  while (i < lines.length) {
    result.push(lines[i]);
    if (lines[i].trim() === START_MARKER) {
      i++;
      break;
    }
    i++;
  }

  // Skip the old generated content until the end marker (exclusive).
  while (i < lines.length) {
    if (lines[i].trim() === END_MARKER) break;
    i++;
  }

  // Insert the freshly generated copy lines.
  for (const line of copyLines) {
    result.push(line);
  }

  // Re-emit the end marker (if it was found above).
  if (i < lines.length && lines[i].trim() === END_MARKER) {
    result.push(lines[i]);
    i++;
  }

  // Now process remaining lines, removing duplicates of what we generated
  // and cleaning up orphaned comments about shared packages/patches.
  while (i < lines.length) {
    const trimmed = lines[i].trim();

    // Remove duplicate COPY lines whose source path is already covered by
    // the generated block (with or without a trailing slash).
    const copyMatch = trimmed.match(/^COPY\s+(\S+)/);
    if (copyMatch) {
      const srcPath = copyMatch[1];
      const normalized = srcPath.replace(/\/$/, '');
      // Exact match (raw, slash-stripped, or slash-appended form).
      if (
        generatedPaths.has(srcPath) ||
        generatedPaths.has(normalized) ||
        generatedPaths.has(normalized + '/')
      ) {
        i++;
        continue;
      }
      // Broader COPY (e.g., apps/todo/packages) where specific sub-paths are
      // generated. Remove if ANY generated path starts with this broader path.
      const isBroaderCovered = [...generatedPaths].some((gp) => gp.startsWith(normalized + '/'));
      if (isBroaderCovered) {
        i++;
        continue;
      }
    }

    // Remove orphaned comments that referred to the old manual copy section.
    if (trimmed.startsWith('#')) {
      // Heuristic patterns for hand-written "Copy ... package(s)" comments.
      const isOrphanedComment =
        trimmed.match(/^#\s*Copy\s+(shared\s+)?packages\s+needed\s+by/i) ||
        trimmed.match(/^#\s*Copy\s+patches/i) ||
        trimmed.match(/^#\s*Copy\s+\w+\s+(content\s+)?package$/i) ||
        trimmed.match(/^#\s*Copy\s+\w+\s+shared\s+package$/i) ||
        trimmed.match(/^#\s*Copy\s+\w+\s+packages(\s+and\s+web)?$/i);
      if (isOrphanedComment) {
        // Check if the next non-empty line is a COPY that was removed or
        // will be removed by the duplicate logic above.
        let j = i + 1;
        while (j < lines.length && lines[j].trim() === '') j++;
        const nextTrimmed = j < lines.length ? lines[j].trim() : '';
        const nextCopy = nextTrimmed.match(/^COPY\s+(\S+)/);
        let nextIsRemoved = false;
        if (nextCopy) {
          const np = nextCopy[1].replace(/\/$/, '');
          nextIsRemoved =
            generatedPaths.has(nextCopy[1]) ||
            generatedPaths.has(np) ||
            generatedPaths.has(np + '/') ||
            [...generatedPaths].some((gp) => gp.startsWith(np + '/'));
        }
        // Remove if the associated COPY is removed, or if the comment
        // is about something now handled by the generated block (i.e. the
        // next non-blank line is not a COPY at all).
        if (nextIsRemoved || !nextCopy) {
          i++;
          continue;
        }
      }
    }

    result.push(lines[i]);
    i++;
  }

  // Collapse blank runs, then ensure exactly one blank line after END_MARKER.
  const cleaned = cleanBlankLines(result);
  for (let k = 0; k < cleaned.length - 1; k++) {
    if (cleaned[k].trim() === END_MARKER && cleaned[k + 1].trim() !== '') {
      cleaned.splice(k + 1, 0, '');
    }
  }
  return cleaned.join('\n');
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Insert markers into a Dockerfile that doesn't have them yet
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Insert a brand-new marker block into a Dockerfile that has no markers yet.
 *
 * Strategy: pass lines through until the `COPY pnpm-lock.yaml` line (the
 * anchor for where workspace sources start being copied), then consume the
 * old hand-written copy section (blank lines, related comments, COPYs of
 * packages/patches and app-local packages), insert the marker block, and
 * re-emit the rest while dropping any remaining duplicate COPYs.
 * If the anchor line is absent the content is returned unchanged.
 *
 * NOTE(review): `appName` is interpolated into a RegExp below — assumes app
 * directory names contain no regex metacharacters; TODO confirm.
 *
 * @param {string[]} lines - Dockerfile content split on '\n'.
 * @param {string[]} copyLines - Generated block lines to insert.
 * @param {Set<string>} generatedPaths - COPY source paths in the generated block.
 * @param {string} appName - App name for matching apps/{appName}/packages COPYs.
 * @returns {string} Updated (or original) Dockerfile content.
 */
function insertMarkersAndBlock(lines, copyLines, generatedPaths, appName) {
  const result = [];
  let i = 0;

  // Pass through until we find COPY pnpm-lock.yaml (kept in the output).
  let foundLockfile = false;
  while (i < lines.length) {
    result.push(lines[i]);
    if (lines[i].trim().match(/^COPY\s+pnpm-lock\.yaml/)) {
      foundLockfile = true;
      i++;
      break;
    }
    i++;
  }

  if (!foundLockfile) {
    // Can't find the anchor pattern — return the content unchanged.
    return lines.join('\n');
  }

  // Now skip all lines that are part of the old manual copy block:
  //   - blank lines
  //   - comments about shared packages / patches
  //   - COPY statements for packages/* or patches
  //   - COPY statements for apps/{appName}/packages/*
  // Stop when we hit anything else (e.g. COPY apps/{appName}/apps/web or RUN install).
  while (i < lines.length) {
    const trimmed = lines[i].trim();

    if (trimmed === '') {
      i++;
      continue;
    }

    // Comments about shared packages, patches, or app packages.
    if (
      trimmed.startsWith('#') &&
      (trimmed.match(/copy\s+(shared\s+)?packages/i) ||
        trimmed.match(/copy\s+patches/i) ||
        trimmed.match(/copy\s+\w+\s+(content\s+)?package/i))
    ) {
      i++;
      continue;
    }

    // COPY for packages/* or patches.
    if (trimmed.match(/^COPY\s+(packages|patches)[/\s]/)) {
      i++;
      continue;
    }

    // COPY for apps/{appName}/packages (app-specific workspace packages).
    if (appName && trimmed.match(new RegExp(`^COPY\\s+apps/${appName}/packages`))) {
      i++;
      continue;
    }

    // End of the block we want to replace.
    break;
  }

  // Insert the marker block (preceded by a separating blank line).
  result.push('');
  result.push(START_MARKER);
  for (const line of copyLines) {
    result.push(line);
  }
  result.push(END_MARKER);

  // Ensure a blank line before the next section.
  if (i < lines.length && lines[i].trim() !== '') {
    result.push('');
  }

  // Add remaining lines, removing any further duplicates of generated COPYs.
  while (i < lines.length) {
    const trimmed = lines[i].trim();
    const copyMatch = trimmed.match(/^COPY\s+(\S+)/);
    if (copyMatch) {
      const srcPath = copyMatch[1].replace(/\/$/, '');
      // Match raw, slash-stripped, and slash-appended forms.
      if (
        generatedPaths.has(copyMatch[1]) ||
        generatedPaths.has(srcPath) ||
        generatedPaths.has(srcPath + '/')
      ) {
        i++;
        continue;
      }
    }
    result.push(lines[i]);
    i++;
  }

  return cleanBlankLines(result).join('\n');
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Collapse any run of 2+ consecutive blank lines down to a single blank line
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Collapse consecutive blank lines: every run of two or more blank
 * (whitespace-only) lines is reduced to its first line; all other lines
 * pass through unchanged.
 *
 * @param {string[]} lines
 * @returns {string[]} A new array; the input is not mutated.
 */
function cleanBlankLines(lines) {
  const out = [];
  let prevBlank = false;
  for (const line of lines) {
    const isBlank = line.trim() === '';
    if (!(isBlank && prevBlank)) {
      out.push(line);
    }
    prevBlank = isBlank;
  }
  return out;
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Process a single Dockerfile entry (shared logic for all types)
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Process one Dockerfile/package.json pair: regenerate its COPY block and
 * either write the updated file, or (in --check mode) just report that it
 * needs an update. Logs one status line per entry and updates `stats`.
 *
 * @param {string} dockerfilePath - Absolute path to the Dockerfile.
 * @param {string} pkgJsonPath - Absolute path to the matching package.json.
 * @param {string} relPath - Repo-relative Dockerfile path, for log output.
 * @param {string} appName - App/service name passed to processDockerfile.
 * @param {Map<string, string>} packageMap - Package name -> repo-relative dir.
 * @param {{changed: number, unchanged: number, errors: number}} stats
 */
function processEntry(dockerfilePath, pkgJsonPath, relPath, appName, packageMap, stats) {
  if (!existsSync(pkgJsonPath)) {
    console.error(` ERROR: ${relPath} - package.json not found`);
    stats.errors++;
    return;
  }

  const original = readFileSync(dockerfilePath, 'utf8');
  const workspaceDeps = getWorkspaceDeps(pkgJsonPath);
  const updated = processDockerfile(original, appName, generateCopyBlock(workspaceDeps, packageMap));

  if (updated === original) {
    console.log(` OK: ${relPath} (${workspaceDeps.length} deps)`);
    stats.unchanged++;
    return;
  }

  if (isCheck) {
    console.log(` NEEDS UPDATE: ${relPath}`);
  } else {
    writeFileSync(dockerfilePath, updated, 'utf8');
    console.log(` UPDATED: ${relPath} (${workspaceDeps.length} deps)`);
  }
  stats.changed++;
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Main
|
|
// ---------------------------------------------------------------------------
|
|
/**
 * Entry point: build the workspace package map, then process the three
 * Dockerfile families. Web app Dockerfiles are always processed (markers
 * inserted if missing); backend app and service Dockerfiles are only
 * touched when they already contain the marker block. Prints a summary and
 * exits non-zero in --check mode when updates are needed, or on errors.
 */
function main() {
  const packageMap = buildPackageMap();
  const appsDir = join(ROOT, 'apps');
  const servicesDir = join(ROOT, 'services');
  const stats = { changed: 0, unchanged: 0, errors: 0 };

  // Guard against a missing apps/ directory for parity with the services/
  // check below — readdirSync would otherwise throw.
  const appDirs = existsSync(appsDir)
    ? readdirSync(appsDir, { withFileTypes: true })
        .filter((e) => e.isDirectory())
        .map((e) => e.name)
        .sort()
    : [];

  // --- Web app Dockerfiles (always process, insert markers if missing) ---
  console.log('=== Web App Dockerfiles ===');
  for (const appName of appDirs) {
    const dockerfilePath = join(appsDir, appName, 'apps', 'web', 'Dockerfile');
    if (!existsSync(dockerfilePath)) continue;

    const pkgJsonPath = join(appsDir, appName, 'apps', 'web', 'package.json');
    const relPath = `apps/${appName}/apps/web/Dockerfile`;
    processEntry(dockerfilePath, pkgJsonPath, relPath, appName, packageMap, stats);
  }

  // --- Backend app Dockerfiles (only if markers already exist) ---
  console.log('\n=== Backend App Dockerfiles ===');
  for (const appName of appDirs) {
    const dockerfilePath = join(appsDir, appName, 'apps', 'backend', 'Dockerfile');
    if (!existsSync(dockerfilePath)) continue;

    const content = readFileSync(dockerfilePath, 'utf8');
    if (!content.includes(START_MARKER)) {
      console.log(` SKIP: apps/${appName}/apps/backend/Dockerfile (no markers)`);
      continue;
    }

    const pkgJsonPath = join(appsDir, appName, 'apps', 'backend', 'package.json');
    const relPath = `apps/${appName}/apps/backend/Dockerfile`;
    processEntry(dockerfilePath, pkgJsonPath, relPath, appName, packageMap, stats);
  }

  // --- Service Dockerfiles (only if markers already exist) ---
  console.log('\n=== Service Dockerfiles ===');
  if (existsSync(servicesDir)) {
    const svcDirs = readdirSync(servicesDir, { withFileTypes: true })
      .filter((e) => e.isDirectory())
      .map((e) => e.name)
      .sort();

    for (const svcName of svcDirs) {
      const dockerfilePath = join(servicesDir, svcName, 'Dockerfile');
      if (!existsSync(dockerfilePath)) continue;

      const content = readFileSync(dockerfilePath, 'utf8');
      if (!content.includes(START_MARKER)) {
        console.log(` SKIP: services/${svcName}/Dockerfile (no markers)`);
        continue;
      }

      const pkgJsonPath = join(servicesDir, svcName, 'package.json');
      const relPath = `services/${svcName}/Dockerfile`;
      processEntry(dockerfilePath, pkgJsonPath, relPath, svcName, packageMap, stats);
    }
  }

  // Summary line; wording depends on whether we actually wrote files.
  console.log('');
  console.log(
    `Processed ${stats.changed + stats.unchanged + stats.errors} Dockerfiles: ${stats.changed} ${isCheck ? 'need updates' : 'updated'}, ${stats.unchanged} unchanged, ${stats.errors} errors`
  );

  // In --check mode, stale Dockerfiles are a failure for CI.
  if (isCheck && stats.changed > 0) {
    console.log('\nRun `pnpm generate:dockerfiles` to fix.');
    process.exit(1);
  }
  if (stats.errors > 0) {
    process.exit(1);
  }
}
|
|
|
|
main();
|