// Backup utilities: pg_dump database dumps plus JSON application-state snapshots.
const fs = require('fs');
|
|
const path = require('path');
|
|
const { spawn } = require('child_process');
|
|
|
|
const BACKUPS_DIR = path.join(__dirname, '..', '..', 'backups');
|
|
const USERS_FILE = path.join(__dirname, '..', '..', 'users.json');
|
|
const AUDIT_LOG_FILE = path.join(__dirname, '..', '..', 'audit.log');
|
|
const SETTINGS_FILE = path.join(__dirname, '..', '..', 'settings.json');
|
|
const BACKUP_PREFIX = 'backup-';
|
|
|
|
/**
 * Create the backups directory if it does not already exist.
 * With `recursive: true`, mkdirSync is a no-op when the directory is present.
 */
function ensureBackupsDir() {
  fs.mkdirSync(BACKUPS_DIR, { recursive: true });
}
|
|
|
|
/**
 * Build a filesystem-safe timestamp for backup filenames.
 * Colons and dots in the ISO-8601 string are swapped for dashes because
 * they are unsafe or ambiguous in filenames on some platforms.
 *
 * @returns {string} e.g. "2024-01-02T03-04-05-678Z"
 */
function makeBackupStamp() {
  const iso = new Date().toISOString();
  return iso.replace(/[:.]/g, '-');
}
|
|
|
|
/**
 * Gather a JSON-serializable snapshot of application state: every table in
 * the public schema (column structure + full row data) plus the JSON/log
 * files stored alongside the database (users, settings, audit log).
 *
 * @param {object} pool - pg Pool (anything exposing async query(text, params)).
 * @param {string} [actor='system'] - recorded in the snapshot metadata.
 * @returns {Promise<object>} snapshot object ready for JSON.stringify.
 */
async function collectAppSnapshot(pool, actor = 'system') {
  const tablesResult = await pool.query(`
    SELECT table_name
    FROM information_schema.tables
    WHERE table_schema = 'public'
    ORDER BY table_name
  `);

  const tables = [];
  for (const row of tablesResult.rows) {
    const tableName = row.table_name;
    const structure = await pool.query(`
      SELECT
        c.column_name AS name,
        c.data_type AS type,
        c.is_nullable AS nullable,
        c.column_default AS default_value
      FROM information_schema.columns c
      WHERE c.table_name = $1 AND c.table_schema = 'public'
      ORDER BY c.ordinal_position
    `, [tableName]);
    // Identifiers cannot be bound as query parameters. Escape embedded double
    // quotes ("" is the SQL escape inside a quoted identifier) so a
    // maliciously-named table cannot break out of the quoting and inject SQL.
    const quotedName = `"${tableName.replace(/"/g, '""')}"`;
    const data = await pool.query(`SELECT * FROM ${quotedName}`);
    tables.push({
      name: tableName,
      structure: structure.rows,
      rows: data.rows,
    });
  }

  return {
    meta: {
      createdAt: new Date().toISOString(),
      createdBy: actor,
      version: 1,
    },
    // Missing files degrade to sensible defaults rather than throwing.
    users: fs.existsSync(USERS_FILE) ? JSON.parse(fs.readFileSync(USERS_FILE, 'utf8')) : { users: [] },
    settings: fs.existsSync(SETTINGS_FILE) ? JSON.parse(fs.readFileSync(SETTINGS_FILE, 'utf8')) : null,
    audit: fs.existsSync(AUDIT_LOG_FILE)
      ? fs.readFileSync(AUDIT_LOG_FILE, 'utf8').split(/\r?\n/).filter(Boolean)
      : [],
    tables,
  };
}
|
|
|
|
/**
 * Run `pg_dump` against the database described by the DB_* environment
 * variables and resolve with the plain-SQL dump as a UTF-8 string.
 *
 * The dump uses --clean --if-exists so restoring drops objects first, and
 * --no-owner/--no-privileges so it restores cleanly under a different role.
 *
 * @returns {Promise<string>} the SQL dump.
 * @throws rejects when pg_dump cannot be spawned or exits non-zero; the
 *   rejection message is pg_dump's stderr output when available.
 */
function runPgDump() {
  return new Promise((resolve, reject) => {
    const args = [
      '-h', process.env.DB_HOST || 'db',
      '-p', String(process.env.DB_PORT || '5432'),
      '-U', process.env.DB_USER || 'postgres',
      '-d', process.env.DB_NAME || 'postgres',
      '--clean',
      '--if-exists',
      '--no-owner',
      '--no-privileges',
    ];

    const child = spawn('pg_dump', args, {
      // pg_dump reads the password from PGPASSWORD; never pass it on argv.
      env: { ...process.env, PGPASSWORD: process.env.DB_PASSWORD || '' },
      stdio: ['ignore', 'pipe', 'pipe'],
    });

    const outChunks = [];
    const errChunks = [];
    child.stdout.on('data', (chunk) => outChunks.push(chunk));
    child.stderr.on('data', (chunk) => errChunks.push(chunk));

    child.on('error', reject);
    child.on('close', (code) => {
      if (code === 0) {
        resolve(Buffer.concat(outChunks).toString('utf8'));
      } else {
        const message = Buffer.concat(errChunks).toString('utf8');
        reject(new Error(message || `pg_dump exited with code ${code}`));
      }
    });
  });
}
|
|
|
|
/**
 * Describe one backup file for API consumers.
 *
 * @param {string} filePath - absolute path to the backup file.
 * @param {string} filename - its bare filename.
 * @returns {{filename: string, size: number, createdAt: string,
 *   kind: 'database'|'application', bundle: string|null}} - `bundle` is the
 *   shared timestamp linking a -db.sql file with its -app.json sibling, or
 *   null when the name does not follow the backup naming convention.
 */
function formatBackupEntry(filePath, filename) {
  const stats = fs.statSync(filePath);
  const bundleMatch = filename.match(/^backup-(.+?)-(db|app)\.(sql|json)$/);
  return {
    filename,
    size: stats.size,
    createdAt: stats.birthtime.toISOString(),
    kind: filename.endsWith('.sql') ? 'database' : 'application',
    bundle: bundleMatch === null ? null : bundleMatch[1],
  };
}
|
|
|
|
/**
 * Delete the oldest backup files, keeping at most `keepLast` bundles.
 * A bundle is up to two files (SQL dump + app snapshot), so the retention
 * cap is keepLast * 2 files, ranked newest-first by mtime.
 *
 * @param {number} [keepLast=14] - number of bundles to retain (floored at 1).
 */
function pruneBackups(keepLast = 14) {
  ensureBackupsDir();
  const maxFiles = Math.max(1, keepLast) * 2;

  const isBackupFile = (name) =>
    name.startsWith(BACKUP_PREFIX) && (name.endsWith('.json') || name.endsWith('.sql'));

  const candidates = fs.readdirSync(BACKUPS_DIR)
    .filter(isBackupFile)
    .map((name) => {
      const filePath = path.join(BACKUPS_DIR, name);
      return { name, filePath, mtimeMs: fs.statSync(filePath).mtimeMs };
    })
    .sort((a, b) => b.mtimeMs - a.mtimeMs);

  // Everything past the cap is oldest-first tail; remove it.
  for (const { filePath } of candidates.slice(maxFiles)) {
    fs.unlinkSync(filePath);
  }
}
|
|
|
|
/**
 * Create a new backup bundle: a pg_dump SQL file plus (by default) a JSON
 * application snapshot, both named with one shared timestamp.
 *
 * @param {object} pool - pg Pool, forwarded to collectAppSnapshot.
 * @param {string} [actor='system'] - recorded in the snapshot metadata.
 * @param {object} [options]
 * @param {boolean} [options.includeAppSnapshot=true] - write the JSON snapshot too.
 * @param {number} [options.keepLast] - when truthy, prune older backups afterwards.
 * @returns {Promise<{createdAt: string, files: object[]}>} created file entries.
 */
async function createBackup(pool, actor = 'system', options = {}) {
  ensureBackupsDir();

  const stamp = makeBackupStamp();
  const createdAt = new Date().toISOString();
  const files = [];

  // The database dump is always written.
  const sqlDump = await runPgDump();
  const sqlFilename = `${BACKUP_PREFIX}${stamp}-db.sql`;
  const sqlPath = path.join(BACKUPS_DIR, sqlFilename);
  fs.writeFileSync(sqlPath, sqlDump, 'utf8');
  files.push(formatBackupEntry(sqlPath, sqlFilename));

  // The app snapshot is opt-out: only an explicit `false` skips it.
  if (options.includeAppSnapshot !== false) {
    const snapshot = await collectAppSnapshot(pool, actor);
    const jsonFilename = `${BACKUP_PREFIX}${stamp}-app.json`;
    const jsonPath = path.join(BACKUPS_DIR, jsonFilename);
    fs.writeFileSync(jsonPath, JSON.stringify(snapshot, null, 2), 'utf8');
    files.push(formatBackupEntry(jsonPath, jsonFilename));
  }

  if (options.keepLast) {
    pruneBackups(options.keepLast);
  }

  return { createdAt, files };
}
|
|
|
|
/**
 * List every backup file in the backups directory, newest first
 * (ordered by the createdAt timestamp string, descending).
 *
 * @returns {object[]} entries as produced by formatBackupEntry.
 */
function listBackups() {
  ensureBackupsDir();
  const names = fs.readdirSync(BACKUPS_DIR).filter(
    (name) => name.startsWith(BACKUP_PREFIX) && (name.endsWith('.json') || name.endsWith('.sql')),
  );
  const entries = names.map((name) => formatBackupEntry(path.join(BACKUPS_DIR, name), name));
  entries.sort((a, b) => b.createdAt.localeCompare(a.createdAt));
  return entries;
}
|
|
|
|
/**
 * Resolve a backup filename to its absolute path inside BACKUPS_DIR.
 *
 * Guards against path traversal: the resolved path must live strictly
 * inside BACKUPS_DIR. The previous bare startsWith(BACKUPS_DIR) check was
 * bypassable via a sibling directory sharing the prefix (e.g. a filename of
 * "../backups-evil/x" resolves to ".../backups-evil/x", which still starts
 * with ".../backups"); comparing against the directory plus a trailing
 * separator closes that hole.
 *
 * @param {string} filename - backup filename relative to BACKUPS_DIR.
 * @returns {string} absolute path to the existing backup file.
 * @throws {Error} "Backup not found" when the name escapes the backups
 *   directory or the file does not exist.
 */
function getBackupPath(filename) {
  const filePath = path.resolve(BACKUPS_DIR, filename);
  const insideBackupsDir = filePath.startsWith(BACKUPS_DIR + path.sep);
  if (!insideBackupsDir || !fs.existsSync(filePath)) {
    throw new Error('Backup not found');
  }
  return filePath;
}
|
|
|
|
// Public API: backup creation, listing, pruning, and safe path resolution
// for downloads. BACKUPS_DIR is exported so callers can stream files from it.
module.exports = {
  BACKUPS_DIR,
  createBackup,
  getBackupPath,
  listBackups,
  pruneBackups,
};
|