Add automatic documentation synchronization system
Implements the Option C (Hybrid) solution to prevent outdated documentation.

New Features:
- Pre-commit git hook that validates documentation before every commit
- npm run docs:check - Validates README against the current project state
- npm run docs:sync - Automatically updates the NPM Scripts section in README

What gets checked:
- NPM scripts in package.json vs README
- API routes in app/api/* vs README
- App structure (directories in app/) vs README
- Components vs README
- Scripts vs README

Workflow:
1. Make code changes
2. git commit triggers the pre-commit hook
3. The hook warns if documentation is outdated
4. Run docs:sync to auto-update, or edit manually
5. Commit with the updated README

Benefits:
- No more forgetting to update README
- Non-blocking (can use --no-verify if needed)
- Automatic NPM scripts synchronization
- Clear warnings show exactly what needs updating

Scripts added:
- scripts/check-docs.js - Validation script
- scripts/sync-docs.js - Synchronization script
- .git/hooks/pre-commit - Git hook (not tracked)

Documentation:
- Added complete workflow section in README
- Examples and usage tips included

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
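The `.git/hooks/pre-commit` file is created locally and is not tracked by this commit, so its exact contents do not appear in the diff below. As a rough sketch only, assuming a Node-based hook that delegates to `scripts/check-docs.js` (a shell wrapper around `npm run docs:check` would work just as well), it could look something like this:

```js
#!/usr/bin/env node
// Hypothetical sketch of the pre-commit hook described above; the real hook is
// not tracked by this commit, so treat the details here as assumptions.
const { spawnSync } = require('child_process');
const path = require('path');

// .git/hooks/pre-commit runs two directory levels below the repository root.
const checkScript = path.join(__dirname, '..', '..', 'scripts', 'check-docs.js');

// Run the documentation check and stream its colored output to the terminal.
const result = spawnSync(process.execPath, [checkScript], { stdio: 'inherit' });

// check-docs.js exits with 1 when the README looks outdated; forwarding that
// status stops the commit, and `git commit --no-verify` bypasses the hook.
process.exit(result.status === null ? 1 : result.status);
```

The only hard requirement is that the hook file is executable; whether it blocks the commit or merely warns depends on the exit status it returns.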
scripts/check-docs.js (new executable file, 216 lines)
@@ -0,0 +1,216 @@
#!/usr/bin/env node
/**
 * Documentation validation script
 * Checks if README.md is up to date with the current project structure
 */

const fs = require('fs');
const path = require('path');

// Colors for terminal output
const colors = {
  reset: '\x1b[0m',
  red: '\x1b[31m',
  yellow: '\x1b[33m',
  green: '\x1b[32m',
  cyan: '\x1b[36m',
};

function log(message, color = 'reset') {
  console.log(`${colors[color]}${message}${colors.reset}`);
}

// Read README.md
const readmePath = path.join(__dirname, '..', 'README.md');
const readme = fs.readFileSync(readmePath, 'utf-8');

let issuesFound = [];

/**
 * Check 1: NPM Scripts in package.json vs README
 */
function checkNpmScripts() {
  const packageJson = require('../package.json');
  const scripts = Object.keys(packageJson.scripts);

  const missingScripts = scripts.filter(script => {
    const patterns = [
      new RegExp(`npm run ${script}`, 'g'),
      new RegExp(`\`${script}\``, 'g'),
    ];
    return !patterns.some(pattern => pattern.test(readme));
  });

  if (missingScripts.length > 0) {
    issuesFound.push({
      section: 'NPM Scripts',
      issues: missingScripts.map(s => `Script "${s}" not documented`),
    });
  }
}

/**
 * Check 2: API Routes in app/api vs README
 */
function checkApiRoutes() {
  const apiDir = path.join(__dirname, '..', 'app', 'api');
  if (!fs.existsSync(apiDir)) return;

  const apiRoutes = [];

  function scanDir(dir, prefix = '') {
    const entries = fs.readdirSync(dir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      const routePath = path.join(prefix, entry.name);

      if (entry.isDirectory()) {
        // Skip if it's a dynamic route segment in the path (we'll check parent)
        if (!entry.name.startsWith('[')) {
          apiRoutes.push(routePath.replace(/\\/g, '/'));
        }
        scanDir(fullPath, routePath);
      }
    }
  }

  scanDir(apiDir);

  const missingRoutes = apiRoutes.filter(route => {
    const searchPattern = route.replace(/\\/g, '/');
    return !readme.includes(searchPattern);
  });

  if (missingRoutes.length > 0) {
    issuesFound.push({
      section: 'API Routes',
      issues: missingRoutes.slice(0, 5).map(r => `Route "api/${r}" not documented`),
      hasMore: missingRoutes.length > 5 ? missingRoutes.length - 5 : 0,
    });
  }
}

/**
 * Check 3: Main directories in app/ vs README
 */
function checkAppStructure() {
  const appDir = path.join(__dirname, '..', 'app');
  if (!fs.existsSync(appDir)) return;

  const entries = fs.readdirSync(appDir, { withFileTypes: true });
  const directories = entries
    .filter(e => e.isDirectory() && !e.name.startsWith('.'))
    .map(e => e.name);

  const missingDirs = directories.filter(dir => {
    // Check if directory is mentioned in project structure section
    const structureSection = readme.match(/## 📂 Projektstruktur[\s\S]*?(?=\n## |$)/);
    if (!structureSection) return true;
    return !structureSection[0].includes(`app/${dir}/`);
  });

  if (missingDirs.length > 0) {
    issuesFound.push({
      section: 'App Structure',
      issues: missingDirs.map(d => `Directory "app/${d}/" not in project structure`),
    });
  }
}

/**
 * Check 4: Components vs README
 */
function checkComponents() {
  const componentsDir = path.join(__dirname, '..', 'components');
  if (!fs.existsSync(componentsDir)) return;

  const components = [];

  function scanComponents(dir, prefix = '') {
    const entries = fs.readdirSync(dir, { withFileTypes: true });

    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);

      if (entry.isDirectory()) {
        scanComponents(fullPath, path.join(prefix, entry.name));
      } else if (entry.name.endsWith('.tsx') || entry.name.endsWith('.ts')) {
        const componentPath = path.join(prefix, entry.name).replace(/\\/g, '/');
        components.push(componentPath);
      }
    }
  }

  scanComponents(componentsDir);

  const missingComponents = components.filter(comp => {
    return !readme.includes(comp);
  });

  if (missingComponents.length > 0) {
    issuesFound.push({
      section: 'Components',
      issues: missingComponents.slice(0, 3).map(c => `Component "${c}" not documented`),
      hasMore: missingComponents.length > 3 ? missingComponents.length - 3 : 0,
    });
  }
}

/**
 * Check 5: Scripts vs README
 */
function checkScripts() {
  const scriptsDir = path.join(__dirname, '..', 'scripts');
  if (!fs.existsSync(scriptsDir)) return;

  const scripts = fs.readdirSync(scriptsDir)
    .filter(f => f.endsWith('.js') || f.endsWith('.ts'));

  const missingScripts = scripts.filter(script => {
    return !readme.includes(script);
  });

  if (missingScripts.length > 0) {
    issuesFound.push({
      section: 'Scripts',
      issues: missingScripts.slice(0, 3).map(s => `Script "${s}" not in project structure`),
      hasMore: missingScripts.length > 3 ? missingScripts.length - 3 : 0,
    });
  }
}

// Run all checks
log('\n🔍 Checking documentation...', 'cyan');

checkNpmScripts();
checkApiRoutes();
checkAppStructure();
checkComponents();
checkScripts();

// Report results
if (issuesFound.length === 0) {
  log('\n✅ Documentation is up to date!', 'green');
  process.exit(0);
} else {
  log('\n⚠️ Documentation may be outdated:', 'yellow');
  log('', 'reset');

  issuesFound.forEach(({ section, issues, hasMore }) => {
    log(`  ${section}:`, 'yellow');
    issues.forEach(issue => {
      log(`    - ${issue}`, 'reset');
    });
    if (hasMore) {
      log(`    ... and ${hasMore} more`, 'reset');
    }
    log('', 'reset');
  });

  log('💡 Run: npm run docs:sync', 'cyan');
  log('   Or commit with: git commit --no-verify', 'cyan');
  log('', 'reset');

  process.exit(1);
}