что-то амт

This commit is contained in:
2026-03-20 16:48:45 +07:00
parent d935b7374d
commit 430c7f456e
5 changed files with 505 additions and 155 deletions

View File

@@ -11,8 +11,9 @@ function formatAuditSummary(event, details = {}) {
'user.created': `created user ${details.username || ''}`.trim(),
'user.updated': `updated user ${details.username || ''}`.trim(),
'user.deleted': `deleted user ${details.username || ''}`.trim(),
'backup.created': `created backup ${details.filename || details.files?.map((file) => file.filename).join(', ') || ''}`.trim(),
'backup.auto_created': `created scheduled backup ${details.files?.map((file) => file.filename).join(', ') || ''}`.trim(),
'backup.created': `created backup ${details.filename || ''}`.trim(),
'backup.auto_created': `created scheduled backup ${details.filename || ''}`.trim(),
'backup.restored': `restored backup ${details.filename || ''}`.trim(),
'settings.updated': 'updated system settings',
'table.created': `created table ${details.table || ''}`.trim(),
'table.deleted': `deleted table ${details.table || ''}`.trim(),

View File

@@ -1,4 +1,5 @@
const fs = require('fs');
const os = require('os');
const path = require('path');
const { spawn } = require('child_process');
@@ -7,6 +8,7 @@ const USERS_FILE = path.join(__dirname, '..', '..', 'users.json');
const AUDIT_LOG_FILE = path.join(__dirname, '..', '..', 'audit.log');
const SETTINGS_FILE = path.join(__dirname, '..', '..', 'settings.json');
const BACKUP_PREFIX = 'backup-';
const BACKUP_EXTENSION = '.tar.gz';
function ensureBackupsDir() {
fs.mkdirSync(BACKUPS_DIR, { recursive: true });
@@ -16,6 +18,42 @@ function makeBackupStamp() {
return new Date().toISOString().replace(/[:.]/g, '-');
}
// Create a fresh, uniquely named scratch directory under the OS temp root.
// Returns the absolute path of the new directory.
function makeTempDir() {
  const prefix = path.join(os.tmpdir(), 'pg-admin-backup-');
  return fs.mkdtempSync(prefix);
}
// Best-effort recursive removal of a directory tree.
// `force: true` makes this a no-op (no throw) when the path is already gone.
function cleanupDir(dirPath) {
  fs.rmSync(dirPath, { force: true, recursive: true });
}
/**
 * Spawn a child process and resolve with its captured output.
 *
 * @param {string} command - Executable to run.
 * @param {string[]} args - Argument vector passed to the executable.
 * @param {object} [options] - Options forwarded verbatim to `spawn`.
 * @returns {Promise<{stdout: string, stderr: string}>} Output on exit code 0.
 *   Rejects with the captured stderr text (or a generic message when stderr
 *   is empty) for any non-zero exit, and with the spawn error if the process
 *   could not be started at all.
 */
function runCommand(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    const proc = spawn(command, args, options);
    const outChunks = [];
    const errChunks = [];
    // Streams may be absent depending on the caller's `stdio` configuration.
    proc.stdout?.on('data', (data) => outChunks.push(data));
    proc.stderr?.on('data', (data) => errChunks.push(data));
    proc.on('error', reject);
    proc.on('close', (exitCode) => {
      const stderrText = Buffer.concat(errChunks).toString('utf8');
      if (exitCode !== 0) {
        reject(new Error(stderrText || `${command} exited with code ${exitCode}`));
        return;
      }
      resolve({
        stdout: Buffer.concat(outChunks).toString('utf8'),
        stderr: stderrText,
      });
    });
  });
}
// NOTE(review): diff residue — the "@@" hunk markers below mean parts of this
// function's body (the rest of the SQL text and the logic between hunks) are
// not shown; this fragment is not compilable as-is. Reconstruct from the repo.
async function collectAppSnapshot(pool, actor = 'system') {
// Lists table names from the information schema (query text truncated by the hunk).
const tablesResult = await pool.query(`
SELECT table_name
@@ -49,7 +87,7 @@ async function collectAppSnapshot(pool, actor = 'system') {
meta: {
createdAt: new Date().toISOString(),
createdBy: actor,
// Both the removed (version: 1) and added (version: 2) diff lines are present
// here; the committed code keeps only version: 2.
version: 1,
version: 2,
},
// Fall back to safe defaults when the JSON files are missing on disk.
users: fs.existsSync(USERS_FILE) ? JSON.parse(fs.readFileSync(USERS_FILE, 'utf8')) : { users: [] },
settings: fs.existsSync(SETTINGS_FILE) ? JSON.parse(fs.readFileSync(SETTINGS_FILE, 'utf8')) : null,
@@ -60,60 +98,72 @@ async function collectAppSnapshot(pool, actor = 'system') {
};
}
// Pack the named files from tempDir into a gzipped tarball at archivePath.
// Delegates to the external `tar` binary via runCommand.
function createArchive(tempDir, archivePath, fileNames) {
  const tarArgs = ['-czf', archivePath, '-C', tempDir, ...fileNames];
  return runCommand('tar', tarArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
}
// Unpack the gzipped tarball at archivePath into tempDir.
// Delegates to the external `tar` binary via runCommand.
function extractArchive(archivePath, tempDir) {
  const tarArgs = ['-xzf', archivePath, '-C', tempDir];
  return runCommand('tar', tarArgs, { stdio: ['ignore', 'pipe', 'pipe'] });
}
/**
 * Dump the configured database to a SQL string via `pg_dump`.
 *
 * NOTE(review): the original span contained both the pre-refactor Promise
 * wrapper and the post-refactor runCommand-based implementation fused
 * together by the diff (duplicate `args`, orphaned stream handlers). This
 * keeps the new version; the old wrapper had identical observable behavior.
 *
 * @returns {Promise<string>} The pg_dump stdout (full SQL script).
 */
function runPgDump() {
  const args = [
    '-h', process.env.DB_HOST || 'db',
    '-p', String(process.env.DB_PORT || '5432'),
    '-U', process.env.DB_USER || 'postgres',
    '-d', process.env.DB_NAME || 'postgres',
    '--clean',        // emit DROP statements before each CREATE
    '--if-exists',    // make those DROPs tolerant of missing objects
    '--no-owner',
    '--no-privileges',
  ];
  // PGPASSWORD is how pg_dump reads the password non-interactively.
  return runCommand('pg_dump', args, {
    env: {
      ...process.env,
      PGPASSWORD: process.env.DB_PASSWORD || '',
    },
    stdio: ['ignore', 'pipe', 'pipe'],
  }).then((result) => result.stdout);
}
/**
 * Execute a SQL file against the configured database via `psql`.
 * ON_ERROR_STOP=1 makes psql exit non-zero on the first failing statement,
 * so runCommand rejects instead of silently continuing.
 *
 * @param {string} filePath - Path of the SQL script to run.
 * @returns {Promise<{stdout: string, stderr: string}>} psql output.
 */
function runPsqlFile(filePath) {
  const psqlArgs = [
    '-h', process.env.DB_HOST || 'db',
    '-p', String(process.env.DB_PORT || '5432'),
    '-U', process.env.DB_USER || 'postgres',
    '-d', process.env.DB_NAME || 'postgres',
    '-v', 'ON_ERROR_STOP=1',
    '-f', filePath,
  ];
  const childEnv = { ...process.env, PGPASSWORD: process.env.DB_PASSWORD || '' };
  return runCommand('psql', psqlArgs, { env: childEnv, stdio: ['ignore', 'pipe', 'pipe'] });
}
// NOTE(review): diff residue — old and new lines are interleaved without +/-
// markers, so attribution is ambiguous. The old entry derived `kind` from the
// extension; the new one appears to report kind: 'archive' (and possibly a
// `bundle` stamp). Confirm against the repository before relying on the shape.
function formatBackupEntry(filePath, filename) {
const stats = fs.statSync(filePath);
// Old implementation: kind derived from file extension (.sql -> 'database').
const kind = filename.endsWith('.sql') ? 'database' : 'application';
// New(?) implementation: extract the bundle stamp from legacy per-file names.
const match = filename.match(/^backup-(.+?)-(db|app)\.(sql|json)$/);
return {
filename,
size: stats.size,
// birthtime is file creation time; presumably used for sorting backups.
createdAt: stats.birthtime.toISOString(),
kind,
bundle: match ? match[1] : null,
// Duplicate key: with both diff sides present the later value wins ('archive').
kind: 'archive',
};
}
/**
 * Delete the oldest backup archives, keeping only the `keepLast` most recent
 * (minimum 1). Ordering is by file mtime, newest first.
 *
 * NOTE(review): the original span was diff residue — it contained both the old
 * per-file logic (`maxFiles = keepLast * 2`, .json/.sql filter) and the new
 * archive logic. This keeps the new single-archive version; the mtimeMs field
 * in the map is inferred from the sort comparator because the hunk boundary
 * omitted that line — confirm against the repository.
 *
 * @param {number} [keepLast=14] - Number of newest archives to retain.
 */
function pruneBackups(keepLast = 14) {
  ensureBackupsDir();
  const archives = fs.readdirSync(BACKUPS_DIR)
    .filter((name) => name.startsWith(BACKUP_PREFIX) && name.endsWith(BACKUP_EXTENSION))
    .map((name) => {
      const filePath = path.join(BACKUPS_DIR, name);
      return { name, filePath, mtimeMs: fs.statSync(filePath).mtimeMs };
    })
    .sort((a, b) => b.mtimeMs - a.mtimeMs);
  archives.slice(Math.max(1, keepLast)).forEach((file) => {
    fs.unlinkSync(file.filePath);
  });
}
/**
 * Create a single .tar.gz backup archive containing the database dump and,
 * optionally, an application snapshot (users/settings/audit).
 *
 * NOTE(review): the original span was diff residue fusing the old multi-file
 * implementation (separate -db.sql / -app.json files, `{ createdAt, files }`
 * return) with the new archive implementation. This keeps the new version.
 *
 * @param {object} pool - pg connection pool, forwarded to collectAppSnapshot.
 * @param {string} [actor='system'] - Recorded as the snapshot creator.
 * @param {object} [options]
 * @param {boolean} [options.includeAppSnapshot=true] - Bundle application.json.
 * @param {number} [options.keepLast] - If set, prune old archives afterwards.
 * @returns {Promise<object>} The formatBackupEntry record for the new archive.
 */
async function createBackup(pool, actor = 'system', options = {}) {
  ensureBackupsDir();
  const tempDir = makeTempDir();
  const stamp = makeBackupStamp();
  const archiveFilename = `${BACKUP_PREFIX}${stamp}${BACKUP_EXTENSION}`;
  const archivePath = path.join(BACKUPS_DIR, archiveFilename);
  const fileNames = [];
  try {
    const sqlDump = await runPgDump();
    const sqlFilename = 'database.sql';
    fs.writeFileSync(path.join(tempDir, sqlFilename), sqlDump, 'utf8');
    fileNames.push(sqlFilename);
    if (options.includeAppSnapshot !== false) {
      const snapshot = await collectAppSnapshot(pool, actor);
      const jsonFilename = 'application.json';
      fs.writeFileSync(path.join(tempDir, jsonFilename), JSON.stringify(snapshot, null, 2), 'utf8');
      fileNames.push(jsonFilename);
    }
    await createArchive(tempDir, archivePath, fileNames);
    if (options.keepLast) {
      pruneBackups(options.keepLast);
    }
    return formatBackupEntry(archivePath, archiveFilename);
  } finally {
    // Always remove the staging directory, even when dump/archive fails.
    cleanupDir(tempDir);
  }
}
/**
 * List backup archives in BACKUPS_DIR, newest first (by createdAt string).
 *
 * NOTE(review): the original span contained both the old (.json/.sql) and new
 * (.tar.gz) filter lines from the diff; this keeps the new archive filter.
 *
 * @returns {object[]} formatBackupEntry records, sorted descending.
 */
function listBackups() {
  ensureBackupsDir();
  return fs.readdirSync(BACKUPS_DIR)
    .filter((name) => name.startsWith(BACKUP_PREFIX) && name.endsWith(BACKUP_EXTENSION))
    .map((name) => formatBackupEntry(path.join(BACKUPS_DIR, name), name))
    .sort((a, b) => b.createdAt.localeCompare(a.createdAt));
}
@@ -173,10 +226,55 @@ function getBackupPath(filename) {
return filePath;
}
/**
 * Restore a backup archive: replay database.sql through psql and, unless
 * disabled, write the bundled application snapshot (users/settings/audit)
 * back to disk.
 *
 * @param {string} filename - Archive name inside BACKUPS_DIR (validated by getBackupPath).
 * @param {object} [options]
 * @param {boolean} [options.restoreAppSnapshot=true] - Also restore application.json.
 * @returns {Promise<{filename: string, restoredDatabase: boolean, restoredAppSnapshot: boolean}>}
 * @throws {Error} When the archive lacks database.sql, or extraction/psql fails.
 */
async function restoreBackup(filename, options = {}) {
  ensureBackupsDir();
  const archivePath = getBackupPath(filename);
  const workDir = makeTempDir();
  try {
    await extractArchive(archivePath, workDir);
    const sqlFile = path.join(workDir, 'database.sql');
    const snapshotFile = path.join(workDir, 'application.json');
    if (!fs.existsSync(sqlFile)) {
      throw new Error('Archive does not contain database.sql');
    }
    await runPsqlFile(sqlFile);
    let restoredAppSnapshot = false;
    const wantSnapshot = options.restoreAppSnapshot !== false;
    if (wantSnapshot && fs.existsSync(snapshotFile)) {
      const snapshot = JSON.parse(fs.readFileSync(snapshotFile, 'utf8'));
      if (snapshot.users) {
        fs.writeFileSync(USERS_FILE, JSON.stringify(snapshot.users, null, 2), 'utf8');
      }
      if (snapshot.settings) {
        fs.writeFileSync(SETTINGS_FILE, JSON.stringify(snapshot.settings, null, 2), 'utf8');
      }
      if (Array.isArray(snapshot.audit)) {
        // Rebuild the line-oriented log; trailing newline only when non-empty.
        const suffix = snapshot.audit.length ? '\n' : '';
        fs.writeFileSync(AUDIT_LOG_FILE, `${snapshot.audit.join('\n')}${suffix}`, 'utf8');
      }
      restoredAppSnapshot = true;
    }
    return {
      filename,
      restoredDatabase: true,
      restoredAppSnapshot,
    };
  } finally {
    // Remove the extraction directory regardless of success.
    cleanupDir(workDir);
  }
}
// Public API of the backup module: archive creation/restore plus listing,
// pruning, and safe path resolution helpers.
module.exports = {
BACKUPS_DIR,
createBackup,
getBackupPath,
listBackups,
pruneBackups,
restoreBackup,
};