2
This commit is contained in:
138
.dockerignore
Normal file
138
.dockerignore
Normal file
@@ -0,0 +1,138 @@
|
||||
# =============================================================================
|
||||
# Git
|
||||
# =============================================================================
|
||||
.git
|
||||
.gitea
|
||||
.github
|
||||
.gitlab
|
||||
.gitlab-ci.yml
|
||||
.gitattributes
|
||||
.pre-commit-config.yaml
|
||||
|
||||
# =============================================================================
|
||||
# Python virtual environments
|
||||
# =============================================================================
|
||||
.venv
|
||||
venv
|
||||
env
|
||||
ENV
|
||||
|
||||
# =============================================================================
|
||||
# Python cache
|
||||
# =============================================================================
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.so
|
||||
|
||||
# =============================================================================
|
||||
# Python tooling
|
||||
# =============================================================================
|
||||
.mypy_cache/
|
||||
.pytest_cache/
|
||||
.ruff_cache/
|
||||
.pytype/
|
||||
.pyre/
|
||||
.pyright/
|
||||
|
||||
# =============================================================================
|
||||
# Testing / Coverage
|
||||
# =============================================================================
|
||||
.coverage
|
||||
.coverage.*
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
tests/
|
||||
test/
|
||||
coverage.xml
|
||||
|
||||
# =============================================================================
|
||||
# Build artifacts
|
||||
# =============================================================================
|
||||
build/
|
||||
dist/
|
||||
.eggs/
|
||||
*.egg-info/
|
||||
pip-wheel-metadata/
|
||||
|
||||
# =============================================================================
|
||||
# Logs
|
||||
# =============================================================================
|
||||
*.log
|
||||
logs/
|
||||
log/
|
||||
|
||||
# =============================================================================
|
||||
# Node / Frontend
|
||||
# =============================================================================
|
||||
node_modules/
|
||||
.next/
|
||||
.nuxt/
|
||||
out/
|
||||
coverage/
|
||||
*.tsbuildinfo
|
||||
|
||||
# =============================================================================
|
||||
# IDE / Editor
|
||||
# =============================================================================
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# =============================================================================
|
||||
# Environment files
|
||||
# =============================================================================
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
!.env.sample
|
||||
|
||||
# =============================================================================
|
||||
# Databases
|
||||
# =============================================================================
|
||||
*.db
|
||||
*.sqlite
|
||||
*.sqlite3
|
||||
|
||||
# =============================================================================
|
||||
# Secrets
|
||||
# =============================================================================
|
||||
*.pem
|
||||
*.key
|
||||
*.crt
|
||||
*.p12
|
||||
*.pfx
|
||||
secrets/
|
||||
|
||||
# =============================================================================
|
||||
# Temporary
|
||||
# =============================================================================
|
||||
tmp/
|
||||
temp/
|
||||
*.tmp
|
||||
*.temp
|
||||
.cache/
|
||||
|
||||
# =============================================================================
|
||||
# Jupyter
|
||||
# =============================================================================
|
||||
.ipynb_checkpoints/
|
||||
|
||||
# =============================================================================
|
||||
# ML artifacts
|
||||
# =============================================================================
|
||||
*.pt
|
||||
*.pth
|
||||
*.onnx
|
||||
*.h5
|
||||
*.ckpt
|
||||
*.safetensors
|
||||
*.npy
|
||||
*.npz
|
||||
*.parquet
|
||||
65
.editorconfig
Normal file
65
.editorconfig
Normal file
@@ -0,0 +1,65 @@
|
||||
root = true
|
||||
|
||||
# =============================================================================
|
||||
# Global settings
|
||||
# =============================================================================
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
|
||||
# =============================================================================
|
||||
# Python
|
||||
# =============================================================================
|
||||
[*.py]
|
||||
max_line_length = 88
|
||||
|
||||
# =============================================================================
|
||||
# YAML (Docker, CI, compose)
|
||||
# =============================================================================
|
||||
[*.yml]
|
||||
indent_size = 2
|
||||
|
||||
[*.yaml]
|
||||
indent_size = 2
|
||||
|
||||
# =============================================================================
|
||||
# JSON
|
||||
# =============================================================================
|
||||
[*.json]
|
||||
indent_size = 2
|
||||
|
||||
# =============================================================================
|
||||
# TOML (pyproject.toml, poetry)
|
||||
# =============================================================================
|
||||
[*.toml]
|
||||
indent_size = 2
|
||||
|
||||
# =============================================================================
|
||||
# Markdown
|
||||
# =============================================================================
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
indent_size = 2
|
||||
|
||||
# =============================================================================
|
||||
# Shell scripts
|
||||
# =============================================================================
|
||||
[*.sh]
|
||||
indent_size = 2
|
||||
|
||||
# =============================================================================
|
||||
# Makefile (tabs required)
|
||||
# =============================================================================
|
||||
[Makefile]
|
||||
indent_style = tab
|
||||
|
||||
# =============================================================================
|
||||
# INI / config files
|
||||
# =============================================================================
|
||||
[*.ini]
|
||||
indent_size = 2
|
||||
83
.gitattributes
vendored
Normal file
83
.gitattributes
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
# =============================================================================
|
||||
# Global text normalization
|
||||
# =============================================================================
|
||||
* text=auto eol=lf
|
||||
|
||||
# =============================================================================
|
||||
# Shell scripts (must stay LF)
|
||||
# =============================================================================
|
||||
*.sh text eol=lf
|
||||
*.bash text eol=lf
|
||||
*.zsh text eol=lf
|
||||
|
||||
# =============================================================================
|
||||
# Windows scripts
|
||||
# =============================================================================
|
||||
*.bat text eol=crlf
|
||||
*.cmd text eol=crlf
|
||||
*.ps1 text eol=crlf
|
||||
|
||||
# =============================================================================
|
||||
# Binary images
|
||||
# =============================================================================
|
||||
*.png binary
|
||||
*.jpg binary
|
||||
*.jpeg binary
|
||||
*.gif binary
|
||||
*.bmp binary
|
||||
*.webp binary
|
||||
*.ico binary
|
||||
|
||||
# SVG is text
|
||||
*.svg text
|
||||
|
||||
# =============================================================================
|
||||
# Media
|
||||
# =============================================================================
|
||||
*.mp3 binary
|
||||
*.wav binary
|
||||
*.ogg binary
|
||||
*.mp4 binary
|
||||
*.mov binary
|
||||
*.avi binary
|
||||
*.mkv binary
|
||||
|
||||
# =============================================================================
|
||||
# Fonts
|
||||
# =============================================================================
|
||||
*.eot binary
|
||||
*.ttf binary
|
||||
*.woff binary
|
||||
*.woff2 binary
|
||||
*.otf binary
|
||||
|
||||
# =============================================================================
|
||||
# Documents
|
||||
# =============================================================================
|
||||
*.pdf binary
|
||||
|
||||
# =============================================================================
|
||||
# WebAssembly
|
||||
# =============================================================================
|
||||
*.wasm binary
|
||||
|
||||
# =============================================================================
|
||||
# Jupyter
|
||||
# =============================================================================
|
||||
*.ipynb binary
|
||||
|
||||
# =============================================================================
|
||||
# Git LFS (ML / large artifacts)
|
||||
# =============================================================================
|
||||
*.pt filter=lfs diff=lfs merge=lfs -text
|
||||
*.pth filter=lfs diff=lfs merge=lfs -text
|
||||
*.onnx filter=lfs diff=lfs merge=lfs -text
|
||||
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
||||
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
||||
|
||||
# =============================================================================
|
||||
# GitHub linguist hints
|
||||
# =============================================================================
|
||||
docs/** linguist-documentation
|
||||
generated/** linguist-generated
|
||||
vendor/** linguist-vendored
|
||||
166
.gitignore
vendored
Normal file
166
.gitignore
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
# =============================================================================
|
||||
# OS
|
||||
# =============================================================================
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
Desktop.ini
|
||||
|
||||
# =============================================================================
|
||||
# IDE / Editors
|
||||
# =============================================================================
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
*.sublime-*
|
||||
*.code-workspace
|
||||
|
||||
# =============================================================================
|
||||
# Logs
|
||||
# =============================================================================
|
||||
*.log
|
||||
*.logs
|
||||
*.logs.*
|
||||
*.log.*
|
||||
logs/
|
||||
log/
|
||||
|
||||
# =============================================================================
|
||||
# Environment / Secrets
|
||||
# =============================================================================
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
!.env.sample
|
||||
!.env.template
|
||||
|
||||
# =============================================================================
|
||||
# Security keys
|
||||
# =============================================================================
|
||||
*.pem
|
||||
*.key
|
||||
*.crt
|
||||
*.p12
|
||||
*.pfx
|
||||
secrets/
|
||||
|
||||
# =============================================================================
|
||||
# Python
|
||||
# =============================================================================
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
|
||||
# Virtual environments
|
||||
.venv/
|
||||
venv/
|
||||
env/
|
||||
ENV/
|
||||
|
||||
# Packaging
|
||||
build/
|
||||
dist/
|
||||
.eggs/
|
||||
*.egg-info/
|
||||
pip-wheel-metadata/
|
||||
|
||||
# Testing / coverage
|
||||
.coverage
|
||||
.coverage.*
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
|
||||
# Tool caches
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
.ruff_cache/
|
||||
.pyre/
|
||||
.pytype/
|
||||
.pyright/
|
||||
|
||||
# Jupyter
|
||||
.ipynb_checkpoints/
|
||||
|
||||
# =============================================================================
|
||||
# Node / Frontend
|
||||
# =============================================================================
|
||||
node_modules/
|
||||
.next/
|
||||
.nuxt/
|
||||
coverage/
|
||||
*.tsbuildinfo
|
||||
|
||||
# =============================================================================
|
||||
# Java / Kotlin
|
||||
# =============================================================================
|
||||
.gradle/
|
||||
out/
|
||||
*.class
|
||||
|
||||
# =============================================================================
|
||||
# Go
|
||||
# =============================================================================
|
||||
bin/
|
||||
*.test
|
||||
|
||||
# =============================================================================
|
||||
# Rust
|
||||
# =============================================================================
|
||||
target/
|
||||
|
||||
# =============================================================================
|
||||
# C / C++ / CMake
|
||||
# =============================================================================
|
||||
cmake-build-*/
|
||||
CMakeFiles/
|
||||
CMakeCache.txt
|
||||
compile_commands.json
|
||||
|
||||
# =============================================================================
|
||||
# Docker
|
||||
# =============================================================================
|
||||
docker-compose.override.yml
|
||||
*.tar
|
||||
|
||||
# =============================================================================
|
||||
# Databases
|
||||
# =============================================================================
|
||||
*.sqlite
|
||||
*.sqlite3
|
||||
*.db
|
||||
|
||||
# =============================================================================
|
||||
# ML / Data artifacts
|
||||
# =============================================================================
|
||||
*.pt
|
||||
*.pth
|
||||
*.onnx
|
||||
*.h5
|
||||
*.ckpt
|
||||
*.safetensors
|
||||
*.npy
|
||||
*.npz
|
||||
*.parquet
|
||||
*.joblib
|
||||
*.pkl
|
||||
*.pickle
|
||||
|
||||
# =============================================================================
|
||||
# Archives
|
||||
# =============================================================================
|
||||
*.zip
|
||||
*.tar.*
|
||||
*.gz
|
||||
*.7z
|
||||
*.rar
|
||||
|
||||
# =============================================================================
|
||||
# Temporary
|
||||
# =============================================================================
|
||||
tmp/
|
||||
temp/
|
||||
*.tmp
|
||||
.cache/
|
||||
55
.pre-commit-config.yaml
Normal file
55
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,55 @@
|
||||
repos:
|
||||
# =============================================================================
|
||||
# Ruff (lint + import sorting + formatting)
|
||||
# =============================================================================
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.4.4
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
- id: ruff-format
|
||||
|
||||
# =============================================================================
|
||||
# Base repository hygiene
|
||||
# =============================================================================
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-yaml
|
||||
args: [--allow-multiple-documents]
|
||||
- id: check-json
|
||||
- id: check-toml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- id: check-merge-conflict
|
||||
- id: detect-private-key
|
||||
- id: check-added-large-files
|
||||
- id: debug-statements
|
||||
- id: check-executables-have-shebangs
|
||||
- id: requirements-txt-fixer
|
||||
|
||||
# =============================================================================
|
||||
# Static typing
|
||||
# =============================================================================
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v1.10.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
args: [--ignore-missing-imports]
|
||||
|
||||
# =============================================================================
|
||||
# Security checks
|
||||
# =============================================================================
|
||||
- repo: https://github.com/PyCQA/bandit
|
||||
rev: 1.7.8
|
||||
hooks:
|
||||
- id: bandit
|
||||
args: ["-r", "src"]
|
||||
|
||||
# =============================================================================
|
||||
# Secret detection
|
||||
# =============================================================================
|
||||
- repo: https://github.com/Yelp/detect-secrets
|
||||
rev: v1.5.0
|
||||
hooks:
|
||||
- id: detect-secrets
|
||||
20
Dockerfile
Normal file
20
Dockerfile
Normal file
@@ -0,0 +1,20 @@
|
||||
# Lightweight Node.js base image
FROM node:20-alpine

# Run the app in production mode
ENV NODE_ENV=production

# Working directory
WORKDIR /app

# Copy dependency manifests first so the install layer is cached
COPY package*.json ./

# Install runtime dependencies only.
# --omit=dev replaces the deprecated --production flag of npm install.
RUN npm install --omit=dev

# Copy the rest of the project
COPY . .

# Expose the HTTP port
EXPOSE 3000

# Start the server
CMD ["npm", "start"]
|
||||
75
Makefile
Normal file
75
Makefile
Normal file
@@ -0,0 +1,75 @@
|
||||
# Project name (adjust as needed)
PROJECT_NAME=postgres_admin

# Compose command
COMPOSE=docker-compose

# Every target is a command, not a file on disk
.PHONY: build up down restart rebuild logs logs-app logs-db bash psql clean reset update ps

# === Core commands ===

# Build containers
build:
	$(COMPOSE) build

# Start (detached)
up:
	$(COMPOSE) up -d

# Stop
down:
	$(COMPOSE) down

# Restart
restart:
	$(COMPOSE) down
	$(COMPOSE) up -d

# Rebuild + start
rebuild:
	$(COMPOSE) up -d --build

# === Logs ===

# All logs
logs:
	$(COMPOSE) logs -f

# Backend logs
logs-app:
	$(COMPOSE) logs -f backend

# Database logs
# Note: the compose service is named `db` (see docker-compose.yml);
# the previous target used `postgres`, which is not a service name.
logs-db:
	$(COMPOSE) logs -f db

# === Maintenance ===

# Shell into the backend container
bash:
	$(COMPOSE) exec backend sh

# Open psql inside the database container (service name is `db`)
psql:
	$(COMPOSE) exec db psql -U postgres -d testdb

# Cleanup (careful — deletes volume data!)
clean:
	$(COMPOSE) down -v
	docker system prune -f

# Full reset (hard)
reset:
	$(COMPOSE) down -v --remove-orphans
	docker system prune -af

# === Update ===

# Pull latest code + rebuild
update:
	git pull
	$(COMPOSE) up -d --build

# === Status ===

# Check containers
ps:
	$(COMPOSE) ps
|
||||
56
docker-compose.yml
Normal file
56
docker-compose.yml
Normal file
@@ -0,0 +1,56 @@
|
||||
services:
  # PostgreSQL database
  db:
    image: postgres:16
    container_name: app_postgres
    restart: unless-stopped

    environment:
      # NOTE(review): default superuser credentials checked into the repo —
      # override these in production.
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: testdb

    ports:
      # NOTE(review): publishes Postgres on the host. The backend reaches the
      # DB over app_network, so this mapping may be unnecessary — confirm.
      - "5432:5432"

    volumes:
      # Persist database files across container restarts
      - postgres_data:/var/lib/postgresql/data

    networks:
      - app_network

    # Gate backend startup on the DB accepting connections
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5


  # Node.js admin panel (built from the local Dockerfile)
  backend:
    build: .
    container_name: app_backend
    restart: unless-stopped

    ports:
      - "3000:3000"

    # DB_* / SESSION_SECRET etc. come from .env (read by dotenv in server.js)
    env_file:
      - .env

    volumes:
      # NOTE(review): even read-only, mounting the Docker socket gives this
      # container control over the host's Docker daemon — confirm this
      # exposure is intended (server.js uses it for container listing/logs).
      - /var/run/docker.sock:/var/run/docker.sock:ro

    depends_on:
      db:
        condition: service_healthy

    networks:
      - app_network


volumes:
  postgres_data:


networks:
  app_network:
    driver: bridge
|
||||
16
favicon.svg
Normal file
16
favicon.svg
Normal file
@@ -0,0 +1,16 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 64 64">
|
||||
<!-- Фон -->
|
||||
<rect width="64" height="64" rx="16" fill="#1d4ed8"/>
|
||||
|
||||
<!-- Иконка базы данных -->
|
||||
<g fill="none" stroke="white" stroke-width="3">
|
||||
<!-- Верх -->
|
||||
<ellipse cx="32" cy="20" rx="12" ry="6"/>
|
||||
|
||||
<!-- Боковые линии -->
|
||||
<path d="M20 20v16c0 3.3 5.4 6 12 6s12-2.7 12-6V20"/>
|
||||
|
||||
<!-- Средние линии -->
|
||||
<path d="M20 28c0 3.3 5.4 6 12 6s12-2.7 12-6"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 513 B |
1754
index.html
Normal file
1754
index.html
Normal file
File diff suppressed because it is too large
Load Diff
21
package.json
Normal file
21
package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "postgres-admin-panel",
|
||||
"version": "1.0.0",
|
||||
"description": "PostgreSQL Admin Panel with .env configuration",
|
||||
"main": "server.js",
|
||||
"scripts": {
|
||||
"start": "node server.js",
|
||||
"dev": "nodemon server.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "^4.18.2",
|
||||
"pg": "^8.11.3",
|
||||
"dotenv": "^16.3.1",
|
||||
"cors": "^2.8.5",
|
||||
"express-session": "^1.17.3",
|
||||
"bcryptjs": "^2.4.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"nodemon": "^3.0.1"
|
||||
}
|
||||
}
|
||||
950
server.js
Normal file
950
server.js
Normal file
@@ -0,0 +1,950 @@
|
||||
require('dotenv').config();
|
||||
const express = require('express');
|
||||
const { Pool } = require('pg');
|
||||
const session = require('express-session');
|
||||
const cors = require('cors');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const http = require('http');
|
||||
const bcrypt = require('bcryptjs');
|
||||
const crypto = require('crypto');
|
||||
|
||||
// Module-load-time snapshot of users.json.
// NOTE(review): this snapshot is only read by the first getUser() below, which
// is itself shadowed by a later `function getUser` redefinition that re-reads
// the file via readUsersConfig() on every call. This block appears to be dead
// legacy code — confirm and consider removing.
let usersConfig = { users: [] };
try {
  usersConfig = JSON.parse(fs.readFileSync(path.join(__dirname, 'users.json'), 'utf8'));
} catch (err) {
  console.warn('⚠️ users.json not found or invalid JSON. Falling back to env-based admin only.');
}
|
||||
|
||||
// Legacy role -> capability table. `folders: null` means no folder restriction.
// NOTE(review): only consumed by the first getRolePermissions() below, which is
// shadowed by a later redefinition that uses LEGACY_ROLE_MAP instead — this
// object appears unused at runtime. Confirm and consider removing.
const rolePermissions = {
  superadmin: { folders: null, canCreate: true, canEdit: true, canDelete: true },
  frontend_admin: { folders: ['frontend'], canCreate: true, canEdit: true, canDelete: true },
  backend_admin: { folders: ['backend'], canCreate: true, canEdit: true, canDelete: true },
  frontend_moder: { folders: ['frontend'], canCreate: true, canEdit: true, canDelete: false },
  backend_moder: { folders: ['backend'], canCreate: true, canEdit: true, canDelete: false },
  viewer: { folders: null, canCreate: false, canEdit: false, canDelete: false },
};
|
||||
|
||||
// NOTE(review): dead code — shadowed by the later `function getUser`
// declaration in this file (with CommonJS function hoisting, the last
// declaration wins). This version reads the load-time usersConfig snapshot
// instead of re-reading users.json. Consider deleting.
function getUser(username) {
  return usersConfig.users.find(u => u.username === username);
}
|
||||
|
||||
/**
 * Derive the logical "folder" for a table from its name.
 * Tables follow a `<folder>__<rest>` naming convention; a missing name or a
 * name without the double-underscore separator maps to 'default'.
 *
 * @param {string|null|undefined} tableName
 * @returns {string} folder name, possibly '' for names starting with '__'.
 */
function getTableFolder(tableName) {
  if (!tableName) {
    return 'default';
  }
  const separatorIndex = tableName.indexOf('__');
  return separatorIndex === -1 ? 'default' : tableName.slice(0, separatorIndex);
}
|
||||
|
||||
// NOTE(review): dead code — shadowed by the later two-argument
// `function getRolePermissions(role, folders)` declaration below.
// This version looks up the (equally dead) rolePermissions table.
// Consider deleting.
function getRolePermissions(role) {
  return rolePermissions[role] || rolePermissions.viewer;
}
|
||||
|
||||
// NOTE(review): dead code — shadowed by the later
// `function canAccessTable(permissionsOrRole, tableName, folders)` declaration
// below, which also accepts a precomputed permissions object. Consider deleting.
function canAccessTable(role, tableName) {
  const perms = getRolePermissions(role);
  if (!perms.folders) return true;
  const folder = getTableFolder(tableName);
  return perms.folders.includes(folder);
}
|
||||
|
||||
// Path of the user store read by readUsersConfig().
const USERS_FILE = path.join(__dirname, 'users.json');
// Docker Engine API over the unix socket (mounted read-only in docker-compose.yml).
const DOCKER_SOCKET_PATH = process.env.DOCKER_SOCKET_PATH || '/var/run/docker.sock';
const DOCKER_API_PREFIX = process.env.DOCKER_API_PREFIX || '/v1.41';
// Whitelist for SQL identifiers; enforced by isValidIdentifier()/quoteIdentifier().
const SAFE_IDENTIFIER = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
// Column types permitted when creating/altering tables through the API.
const ALLOWED_SQL_TYPES = new Set(['VARCHAR(255)', 'TEXT', 'INTEGER', 'BIGINT', 'DECIMAL', 'BOOLEAN', 'DATE', 'TIMESTAMP', 'UUID', 'JSON', 'JSONB']);
// Maps the old folder-scoped role names (frontend_admin, backend_moder, ...)
// onto the new { role, folders } pair used by getRolePermissions().
const LEGACY_ROLE_MAP = {
  frontend_admin: { role: 'admin', folders: ['frontend'] },
  backend_admin: { role: 'admin', folders: ['backend'] },
  frontend_moder: { role: 'moderator', folders: ['frontend'] },
  backend_moder: { role: 'moderator', folders: ['backend'] },
  viewer: { role: 'viewer', folders: null },
  superadmin: { role: 'superadmin', folders: null },
};
|
||||
|
||||
/**
 * Load and sanitize users.json.
 * Each entry is passed through normalizeUser(); invalid entries are dropped.
 * Falls back to an empty user list (env-based superadmin only) when the file
 * is missing or not valid JSON.
 *
 * @returns {{users: object[]}}
 */
function readUsersConfig() {
  try {
    const raw = fs.readFileSync(USERS_FILE, 'utf8');
    const parsed = JSON.parse(raw);
    const rawUsers = Array.isArray(parsed.users) ? parsed.users : [];
    const users = [];
    for (const entry of rawUsers) {
      const normalized = normalizeUser(entry);
      if (normalized) {
        users.push(normalized);
      }
    }
    return { users };
  } catch (err) {
    console.warn('users.json not found or invalid JSON. Falling back to env-based superadmin only.');
    return { users: [] };
  }
}
|
||||
|
||||
/**
 * Coerce a raw users.json entry into the canonical user shape.
 * Legacy folder-scoped roles (frontend_admin, backend_moder, ...) are mapped
 * onto the new role + folders pair via LEGACY_ROLE_MAP; unknown roles degrade
 * to 'viewer'.
 *
 * @param {object} user - raw entry from users.json.
 * @returns {object|null} normalized user, or null when username is missing.
 */
function normalizeUser(user) {
  if (!user || typeof user.username !== 'string') {
    return null;
  }

  const legacy = LEGACY_ROLE_MAP[user.role];
  const role = legacy ? legacy.role : user.role;

  // Explicit folders on the entry win over the legacy-role defaults.
  let folders = null;
  if (Array.isArray(user.folders)) {
    folders = user.folders.filter(Boolean);
  } else if (legacy) {
    folders = legacy.folders;
  }

  const knownRoles = ['admin', 'moderator', 'viewer', 'superadmin'];
  return {
    username: user.username,
    password: typeof user.password === 'string' ? user.password : undefined,
    passwordHash: typeof user.passwordHash === 'string' ? user.passwordHash : undefined,
    role: knownRoles.includes(role) ? role : 'viewer',
    folders,
    disabled: Boolean(user.disabled),
  };
}
|
||||
|
||||
/**
 * Look up a user record by username, re-reading users.json on every call so
 * edits to the file take effect without a restart.
 *
 * @param {string} username
 * @returns {object|null} the normalized user, or null when not found.
 */
function getUser(username) {
  const { users } = readUsersConfig();
  for (const candidate of users) {
    if (candidate.username === username) {
      return candidate;
    }
  }
  return null;
}
|
||||
|
||||
/**
 * Build the effective permission set for a role.
 * superadmin is never folder-scoped; other roles keep the supplied folder
 * scope (a non-empty array) or null for unrestricted. Unknown roles are
 * treated as 'viewer'.
 *
 * @param {string} role
 * @param {string[]|null} [folders] - folder scope for non-superadmin roles.
 * @returns {{role: string, folders: string[]|null, canCreate: boolean,
 *   canEdit: boolean, canDelete: boolean, canViewLogs: boolean, canRunSql: boolean}}
 */
function getRolePermissions(role, folders = null) {
  const capabilityByRole = {
    superadmin: { canCreate: true, canEdit: true, canDelete: true, canViewLogs: true, canRunSql: true },
    admin: { canCreate: true, canEdit: true, canDelete: true, canViewLogs: true, canRunSql: true },
    moderator: { canCreate: true, canEdit: true, canDelete: false, canViewLogs: false, canRunSql: false },
    viewer: { canCreate: false, canEdit: false, canDelete: false, canViewLogs: false, canRunSql: false },
  };

  const effectiveRole = Object.hasOwn(capabilityByRole, role) ? role : 'viewer';
  const scopedFolders = effectiveRole === 'superadmin'
    ? null
    : (folders && folders.length ? folders : null);

  return { role: effectiveRole, folders: scopedFolders, ...capabilityByRole[effectiveRole] };
}
|
||||
|
||||
/**
 * Check whether a role (or a precomputed permission object) may touch the
 * given table. A null/absent folder scope means unrestricted access.
 *
 * @param {string|object} permissionsOrRole - role name, or a permissions object.
 * @param {string} tableName
 * @param {string[]|null} [folders] - folder scope, used only with a role name.
 * @returns {boolean}
 */
function canAccessTable(permissionsOrRole, tableName, folders = null) {
  let perms = permissionsOrRole;
  if (typeof permissionsOrRole === 'string') {
    perms = getRolePermissions(permissionsOrRole, folders);
  }
  if (!perms.folders) {
    return true;
  }
  const tableFolder = getTableFolder(tableName);
  return perms.folders.includes(tableFolder);
}
|
||||
|
||||
function canAccessFolder(permissions, folder) {
|
||||
if (!permissions.folders) return true;
|
||||
return permissions.folders.includes(folder);
|
||||
}
|
||||
|
||||
/**
 * True when value is safe to use as a SQL identifier: a letter or underscore
 * followed by letters, digits, or underscores (per SAFE_IDENTIFIER).
 *
 * @param {string} value
 * @returns {boolean}
 */
function isValidIdentifier(value) {
  const matchesPattern = SAFE_IDENTIFIER.test(value);
  return matchesPattern;
}
|
||||
|
||||
/**
 * Wrap an identifier in double quotes for safe interpolation into SQL.
 *
 * @param {string} identifier
 * @returns {string} the quoted identifier, e.g. `"users"`.
 * @throws {Error} when the identifier fails the SAFE_IDENTIFIER check.
 */
function quoteIdentifier(identifier) {
  if (isValidIdentifier(identifier)) {
    return `"${identifier}"`;
  }
  throw new Error(`Unsafe identifier: ${identifier}`);
}
|
||||
|
||||
/**
 * Shape the object stored in the session for an authenticated user:
 * the username, the role, and the permission set derived from it.
 *
 * @param {{username: string, role: string, folders: string[]|null}} user
 * @returns {{username: string, role: string, permissions: object}}
 */
function createSessionUser({ username, role, folders }) {
  const permissions = getRolePermissions(role, folders);
  return { username, role, permissions };
}
|
||||
|
||||
/**
 * Verify a login attempt against a user record.
 *
 * Missing or disabled users always fail. The bcrypt `passwordHash` field is
 * preferred; the legacy plaintext `password` field is the fallback, compared
 * in constant time.
 *
 * Fixes two defects of the original `user.password === password` fallback:
 * - a record with neither `password` nor `passwordHash` matched a missing
 *   candidate password (`undefined === undefined` → login bypass);
 * - `===` on strings leaks prefix length via timing.
 *
 * @param {object|null} user - normalized user record, or null.
 * @param {string} password - candidate password.
 * @returns {Promise<boolean>} whether the credentials are valid.
 */
async function verifyPassword(user, password) {
  if (!user || user.disabled) {
    return false;
  }

  if (user.passwordHash) {
    return bcrypt.compare(password, user.passwordHash);
  }

  // Legacy plaintext fallback: require both sides to be real strings, then
  // compare in constant time. timingSafeEqual needs equal-length buffers, so
  // a length mismatch is an early (non-secret-dependent) failure.
  if (typeof user.password !== 'string' || typeof password !== 'string') {
    return false;
  }
  const expected = Buffer.from(user.password, 'utf8');
  const provided = Buffer.from(password, 'utf8');
  if (expected.length !== provided.length) {
    return false;
  }
  return crypto.timingSafeEqual(expected, provided);
}
|
||||
|
||||
/**
 * Perform a GET request against the Docker Engine API over the unix socket.
 *
 * @param {string} requestPath - path after the API version prefix,
 *   e.g. '/containers/json?all=1'.
 * @param {{stream?: boolean}} [options] - when `stream` is true, resolves
 *   with the raw http.IncomingMessage so the caller can consume it
 *   incrementally (container logs); otherwise resolves with the complete
 *   response body as a Buffer.
 * @returns {Promise<Buffer|import('http').IncomingMessage>}
 *   Rejects with the error body text on HTTP status >= 400, or with the
 *   underlying socket error.
 */
function dockerRequest(requestPath, { stream = false } = {}) {
  return new Promise((resolve, reject) => {
    const req = http.request({
      socketPath: DOCKER_SOCKET_PATH,
      path: `${DOCKER_API_PREFIX}${requestPath}`,
      method: 'GET',
    }, (response) => {
      if (stream) {
        // Streaming mode: error statuses are still drained fully so the
        // rejection carries the daemon's error message.
        if (response.statusCode >= 400) {
          const chunks = [];
          response.on('data', (chunk) => chunks.push(chunk));
          response.on('end', () => reject(new Error(Buffer.concat(chunks).toString('utf8') || 'Docker stream error')));
          return;
        }
        // Hand the live response stream to the caller unconsumed.
        resolve(response);
        return;
      }

      // Buffered mode: accumulate the whole body before settling.
      const chunks = [];
      response.on('data', (chunk) => chunks.push(chunk));
      response.on('end', () => {
        const body = Buffer.concat(chunks);
        if (response.statusCode >= 400) {
          reject(new Error(body.toString('utf8') || 'Docker API error'));
          return;
        }
        resolve(body);
      });
    });

    req.on('error', reject);
    req.end();
  });
}
|
||||
|
||||
/**
 * Strip Docker's 8-byte stream-multiplexing headers from a log chunk and
 * return the concatenated payload text. Each frame is
 * [stream type (1) | padding (3) | payload length (4, big-endian)] followed
 * by that many payload bytes. Bytes that do not form a complete frame
 * (a truncated trailer or an overrunning payload) are appended verbatim,
 * as a best effort for partial chunks.
 *
 * @param {Buffer} buffer - raw multiplexed chunk from the Docker API.
 * @returns {string} demultiplexed UTF-8 text.
 */
function demuxDockerChunk(buffer) {
  const parts = [];
  let cursor = 0;

  while (cursor + 8 <= buffer.length) {
    const payloadLength = buffer.readUInt32BE(cursor + 4);
    const payloadStart = cursor + 8;
    const payloadEnd = payloadStart + payloadLength;

    if (payloadEnd > buffer.length) {
      // Truncated frame: emit the remainder raw and stop.
      parts.push(buffer.slice(cursor).toString('utf8'));
      return parts.join('');
    }

    parts.push(buffer.slice(payloadStart, payloadEnd).toString('utf8'));
    cursor = payloadEnd;
  }

  if (cursor < buffer.length) {
    // Trailing bytes shorter than a header: pass through as-is.
    parts.push(buffer.slice(cursor).toString('utf8'));
  }

  return parts.join('');
}
|
||||
|
||||
/**
 * List all Docker containers (running and stopped) via the Engine API,
 * projected to a compact { id, name, state, status, image } shape.
 * The leading '/' Docker puts on container names is stripped; containers
 * without a name fall back to the first 12 characters of their id.
 *
 * @returns {Promise<Array<{id: string, name: string, state: string, status: string, image: string}>>}
 */
async function listContainers() {
  const raw = await dockerRequest('/containers/json?all=1');
  const parsed = JSON.parse(raw.toString('utf8'));
  return parsed.map((container) => {
    const primaryName = container.Names?.[0]?.replace(/^\//, '');
    return {
      id: container.Id,
      name: primaryName || container.Id.slice(0, 12),
      state: container.State,
      status: container.Status,
      image: container.Image,
    };
  });
}
|
||||
|
||||
/**
 * Find a container by exact id, id prefix, or exact name.
 *
 * @param {string} nameOrId
 * @returns {Promise<object>} the matching entry from listContainers().
 * @throws {Error} 'Container not found' when nothing matches.
 */
async function resolveContainer(nameOrId) {
  const containers = await listContainers();
  for (const candidate of containers) {
    if (candidate.id === nameOrId || candidate.id.startsWith(nameOrId) || candidate.name === nameOrId) {
      return candidate;
    }
  }
  throw new Error('Container not found');
}
|
||||
|
||||
const app = express();

// Middleware
app.use(cors());                          // NOTE(review): cors() with no options allows all origins — confirm intended
app.use(express.json({ limit: '1mb' }));  // JSON request bodies, capped at 1 MB
app.use(express.static('.'));             // NOTE(review): serves the current working directory (including source files) — consider a dedicated public/ dir

// Session configuration
app.use(session({
  // SECURITY: falls back to a hard-coded secret when SESSION_SECRET is unset —
  // sessions are forgeable with the default. Set SESSION_SECRET in .env.
  secret: process.env.SESSION_SECRET || 'default-secret-change-this',
  resave: false,
  saveUninitialized: false,
  cookie: {
    secure: false,   // cookie is sent over plain HTTP; set true when serving behind TLS
    httpOnly: true,  // not readable from client-side JS
    sameSite: 'lax',
  }
}));
|
||||
|
||||
// Database connection pool (uses .env configuration)
// All DB_* values come from .env (loaded by dotenv at the top of the file);
// `pg` applies its own defaults for any that are undefined.
const pool = new Pool({
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  database: process.env.DB_NAME,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
});
|
||||
|
||||
// Test database connection on startup
// Best-effort probe: logs the outcome but does not abort the process on
// failure, so the server still starts with a broken DB config.
pool.connect((err, client, release) => {
  if (err) {
    console.error('❌ Error connecting to PostgreSQL:', err.message);
    console.log('Проверьте настройки в .env файле');
  } else {
    console.log('✅ Connected to PostgreSQL database');
    console.log(`   Host: ${process.env.DB_HOST}:${process.env.DB_PORT}`);
    console.log(`   Database: ${process.env.DB_NAME}`);
    // Return the client to the pool — without this the pool leaks a slot.
    release();
  }
});
|
||||
|
||||
// Helper: get primary key column for a table (returns null if none)
// Looks up the first PK column of a public-schema table via
// information_schema; composite keys return only the first column found.
async function getPrimaryKeyColumn(tableName) {
  const result = await pool.query(`
    SELECT kcu.column_name
    FROM information_schema.table_constraints tc
    JOIN information_schema.key_column_usage kcu
      ON tc.constraint_name = kcu.constraint_name
      AND tc.table_schema = kcu.table_schema
    WHERE tc.constraint_type = 'PRIMARY KEY'
      AND tc.table_name = $1
      AND tc.table_schema = 'public'
    LIMIT 1
  `, [tableName]);

  return result.rows[0]?.column_name || null;
}
|
||||
|
||||
// Auth gate: rejects with 401 unless a logged-in user exists on the session.
// On success, mirrors the session user onto req.currentUser for downstream
// permission middleware and handlers.
const requireAuth = (req, res, next) => {
  const user = req.session?.user;

  if (!user) {
    return res.status(401).json({ success: false, error: 'Unauthorized' });
  }

  req.currentUser = user;
  return next();
};
|
||||
|
||||
// Middleware factory wrapping a permission predicate. The predicate receives
// the current user's permissions plus the request; a falsy result yields 403
// with the supplied error message, otherwise control passes on.
const requirePermission = (check, errorMessage) => (req, res, next) => {
  const allowed = check(req.currentUser.permissions, req);

  if (!allowed) {
    return res.status(403).json({ success: false, error: errorMessage });
  }

  return next();
};
|
||||
|
||||
// Table-level guard for routes with a :tableName param: first validates that
// the name is a quotable SQL identifier, then checks the current user's
// folder-based permissions for that table.
const requireTableAccess = (req, res, next) => {
  const { tableName } = req.params;

  // quoteIdentifier throws on names that cannot be safely quoted.
  try {
    quoteIdentifier(tableName);
  } catch (err) {
    return res.status(400).json({ success: false, error: 'Invalid table name' });
  }

  const allowed = canAccessTable(req.currentUser.permissions, tableName);
  if (!allowed) {
    return res.status(403).json({ success: false, error: 'Access denied' });
  }

  return next();
};
|
||||
|
||||
// POST /api/login — authenticates against the local user store first, then
// falls back to the ADMIN_USERNAME/ADMIN_PASSWORD env pair (superadmin).
// On success the session is populated and DB connectivity info is returned.
app.post('/api/login', async (req, res) => {
  const { username, password } = req.body || {};

  if (!username || !password) {
    return res.status(400).json({ success: false, error: 'Username and password are required' });
  }

  // Shared success path: verify DB connectivity, persist the session user, and
  // reply with identity + dbInfo. Factored out because the user-store and
  // env-admin branches previously duplicated this block verbatim.
  const establishSession = async (sessionUser) => {
    const result = await pool.query('SELECT NOW() as time');
    req.session.user = sessionUser;

    return res.json({
      success: true,
      username: sessionUser.username,
      role: sessionUser.role,
      permissions: sessionUser.permissions,
      dbInfo: {
        host: process.env.DB_HOST,
        port: process.env.DB_PORT,
        database: process.env.DB_NAME,
        connected: true,
        serverTime: result.rows[0].time,
      }
    });
  };

  // Shared failure path for DB connectivity errors during login.
  const dbFailure = (err) => res.status(500).json({
    success: false,
    error: 'Database connection failed',
    details: err.message,
  });

  const user = getUser(username);
  if (user && await verifyPassword(user, password)) {
    try {
      return await establishSession(createSessionUser(user));
    } catch (err) {
      return dbFailure(err);
    }
  }

  // Env-configured superadmin fallback (full access, no folder restriction).
  if (username === process.env.ADMIN_USERNAME && password === process.env.ADMIN_PASSWORD) {
    try {
      return await establishSession(createSessionUser({ username, role: 'superadmin', folders: null }));
    } catch (err) {
      return dbFailure(err);
    }
  }

  return res.status(401).json({
    success: false,
    error: 'Invalid credentials'
  });
});
|
||||
|
||||
// Logout
// POST /api/logout — drop the server-side session; always reports success.
app.post('/api/logout', (req, res) => {
  req.session.destroy(() => res.json({ success: true }));
});
|
||||
|
||||
// Check session
// GET /api/session — report whether the caller has an active session, and if
// so echo back identity, permissions, and the configured DB target.
app.get('/api/session', (req, res) => {
  const user = req.session?.user;

  if (!user) {
    return res.json({ authenticated: false });
  }

  return res.json({
    authenticated: true,
    username: user.username,
    role: user.role,
    permissions: user.permissions,
    dbInfo: {
      host: process.env.DB_HOST,
      port: process.env.DB_PORT,
      database: process.env.DB_NAME
    }
  });
});
|
||||
|
||||
// Get all tables
// GET /api/tables — list public-schema tables the current user may access,
// each annotated with its row count (0 when the count query fails).
app.get('/api/tables', requireAuth, async (req, res) => {
  try {
    const result = await pool.query(`
      SELECT
        table_name as name
      FROM information_schema.tables
      WHERE table_schema = 'public'
      ORDER BY table_name
    `);

    // Folder-based visibility filter before any per-table work.
    const accessibleTables = result.rows.filter(table => canAccessTable(req.currentUser.permissions, table.name));
    // Row counts fetched in parallel; a failing count degrades to 0 rather
    // than failing the whole listing.
    const tablesWithCounts = await Promise.all(
      accessibleTables.map(async (table) => {
        try {
          const countResult = await pool.query(`SELECT COUNT(*)::int as count FROM ${quoteIdentifier(table.name)}`);
          return {
            ...table,
            rows: countResult.rows[0].count
          };
        } catch (e) {
          return { ...table, rows: 0 };
        }
      })
    );

    res.json(tablesWithCounts);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
|
||||
|
||||
// GET /api/tables/:tableName/data — paginated rows with optional full-text
// search (ILIKE over every column), per-column filters, and sorting.
// Identifiers are whitelisted against information_schema and quoted; values
// always travel as bound parameters.
app.get('/api/tables/:tableName/data', requireAuth, requireTableAccess, async (req, res) => {
  const { tableName } = req.params;
  // Clamp paging inputs: page >= 1, 1 <= limit <= 100.
  const page = Math.max(parseInt(req.query.page, 10) || 1, 1);
  const limit = Math.min(Math.max(parseInt(req.query.limit, 10) || 25, 1), 100);
  const search = String(req.query.search || '').trim();
  const sortColumn = String(req.query.sortColumn || '').trim();
  // Anything other than DESC normalizes to ASC.
  const sortDirection = String(req.query.sortDirection || 'ASC').toUpperCase() === 'DESC' ? 'DESC' : 'ASC';
  const offset = (page - 1) * limit;

  // filters arrives as a JSON-encoded {column: substring} map.
  let filters = {};
  try {
    filters = req.query.filters ? JSON.parse(req.query.filters) : {};
  } catch (err) {
    return res.status(400).json({ success: false, error: 'Invalid filters payload' });
  }

  try {
    // Real column list — used both to build the search clause and to reject
    // filter/sort keys that don't exist on the table.
    const columnsResult = await pool.query(`
      SELECT column_name
      FROM information_schema.columns
      WHERE table_name = $1 AND table_schema = 'public'
      ORDER BY ordinal_position
    `, [tableName]);

    const columns = columnsResult.rows.map(row => row.column_name);
    const whereParts = [];
    const params = [];

    // Global search: one ILIKE parameter matched against every column as text.
    if (search && columns.length) {
      params.push(`%${search}%`);
      const placeholder = `$${params.length}`;
      whereParts.push(`(${columns.map(col => `CAST(${quoteIdentifier(col)} AS TEXT) ILIKE ${placeholder}`).join(' OR ')})`);
    }

    // Per-column filters: silently skip unknown columns and empty values.
    if (filters && typeof filters === 'object') {
      Object.entries(filters).forEach(([column, value]) => {
        if (!columns.includes(column) || !String(value || '').trim()) {
          return;
        }
        params.push(`%${String(value).trim()}%`);
        whereParts.push(`CAST(${quoteIdentifier(column)} AS TEXT) ILIKE $${params.length}`);
      });
    }

    const whereClause = whereParts.length ? `WHERE ${whereParts.join(' AND ')}` : '';
    // Unknown sort column falls back to ordering by the first column.
    const orderBy = columns.includes(sortColumn)
      ? `ORDER BY ${quoteIdentifier(sortColumn)} ${sortDirection}`
      : 'ORDER BY 1';
    // Total matching rows (same WHERE) for pagination metadata.
    const countResult = await pool.query(`SELECT COUNT(*)::int as total FROM ${quoteIdentifier(tableName)} ${whereClause}`, params);
    const total = countResult.rows[0].total;
    const result = await pool.query(`
      SELECT * FROM ${quoteIdentifier(tableName)}
      ${whereClause}
      ${orderBy}
      LIMIT $${params.length + 1} OFFSET $${params.length + 2}
    `, [...params, limit, offset]);

    res.json({
      data: result.rows,
      total,
      page,
      limit,
      totalPages: Math.max(Math.ceil(total / limit), 1)
    });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// GET /api/tables/:tableName/structure — column metadata (name, type,
// nullability, default) with a computed is_primary flag, in ordinal order.
app.get('/api/tables/:tableName/structure', requireAuth, requireTableAccess, async (req, res) => {
  const { tableName } = req.params;

  try {
    // Double LEFT JOIN: the table's PRIMARY KEY constraint first, then its
    // member columns; a non-null kcu.column_name marks a PK column.
    const result = await pool.query(`
      SELECT
        c.column_name as name,
        c.data_type as type,
        c.is_nullable as nullable,
        c.column_default as default_value,
        CASE WHEN kcu.column_name IS NOT NULL THEN true ELSE false END as is_primary
      FROM information_schema.columns c
      LEFT JOIN information_schema.table_constraints tc
        ON tc.table_name = c.table_name
        AND tc.table_schema = c.table_schema
        AND tc.constraint_type = 'PRIMARY KEY'
      LEFT JOIN information_schema.key_column_usage kcu
        ON kcu.constraint_name = tc.constraint_name
        AND kcu.table_schema = tc.table_schema
        AND kcu.column_name = c.column_name
      WHERE c.table_name = $1 AND c.table_schema = 'public'
      ORDER BY c.ordinal_position
    `, [tableName]);

    res.json(result.rows);
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});
|
||||
|
||||
// POST /api/tables — create a table. Permission check uses the table's folder
// (derived from its name) plus the canCreate flag. Names are identifier-
// validated and types restricted to the ALLOWED_SQL_TYPES whitelist, so the
// interpolated DDL stays injection-safe.
app.post('/api/tables', requireAuth, requirePermission((permissions, req) => {
  const folder = getTableFolder(req.body?.name);
  return permissions.canCreate && canAccessFolder(permissions, folder);
}, 'Access denied'), async (req, res) => {
  const { name, columns } = req.body || {};

  if (!isValidIdentifier(name) || !Array.isArray(columns) || !columns.length) {
    return res.status(400).json({ success: false, error: 'Invalid table payload' });
  }

  try {
    const columnsSQL = columns.map((col) => {
      // Reject any column whose name or type falls outside the whitelist;
      // the thrown error surfaces as a 500 with this message.
      if (!isValidIdentifier(col.name) || !ALLOWED_SQL_TYPES.has(col.type)) {
        throw new Error('Invalid column definition');
      }

      let def = `${quoteIdentifier(col.name)} ${col.type}`;
      if (col.pk) def += ' PRIMARY KEY';
      // PK columns are implicitly NOT NULL, so only add it for non-PK columns.
      if (!col.nullable && !col.pk) def += ' NOT NULL';
      return def;
    }).join(', ');

    await pool.query(`CREATE TABLE ${quoteIdentifier(name)} (${columnsSQL})`);
    res.json({ success: true, message: 'Table created' });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// DELETE /api/tables/:tableName — drop a table (idempotent via IF EXISTS).
// Guarded by table access plus the canDelete permission.
app.delete('/api/tables/:tableName', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canDelete && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName } = req.params;

  try {
    const sql = `DROP TABLE IF EXISTS ${quoteIdentifier(tableName)}`;
    await pool.query(sql);
    res.json({ success: true, message: 'Table deleted' });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// POST /api/tables/:tableName/records — insert one row. Incoming keys are
// whitelisted against the table's real columns, empty strings are treated as
// "not provided", and missing uuid values are auto-generated. Values travel
// as bound parameters; only quoted identifiers are interpolated.
app.post('/api/tables/:tableName/records', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canEdit && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName } = req.params;
  const data = req.body || {};

  try {
    const structureResult = await pool.query(`
      SELECT column_name, data_type
      FROM information_schema.columns
      WHERE table_name = $1 AND table_schema = 'public'
    `, [tableName]);

    const structure = structureResult.rows;
    const filteredData = {};
    for (const [key, value] of Object.entries(data)) {
      const colInfo = structure.find(col => col.column_name === key);
      // Drop unknown columns and empty strings (let DB defaults apply).
      if (!colInfo || value === '') {
        continue;
      }

      if (colInfo.data_type === 'uuid') {
        // Blank/absent uuid values get a fresh v4 UUID server-side.
        filteredData[key] = value && String(value).trim() ? value : crypto.randomUUID();
      } else {
        filteredData[key] = value;
      }
    }

    const columns = Object.keys(filteredData);
    if (!columns.length) {
      return res.status(400).json({ success: false, error: 'No record values provided' });
    }

    const values = Object.values(filteredData);
    const placeholders = values.map((_, i) => `$${i + 1}`).join(', ');
    const sql = `INSERT INTO ${quoteIdentifier(tableName)} (${columns.map(quoteIdentifier).join(', ')}) VALUES (${placeholders}) RETURNING *`;
    const result = await pool.query(sql, values);
    res.json({ success: true, data: result.rows[0] });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// PUT /api/tables/:tableName/records/:pk — update one row identified by its
// primary key value. Field names are identifier-validated before being quoted
// into the SET clause; all values are bound parameters. Falls back to an "id"
// column when the table declares no primary key.
app.put('/api/tables/:tableName/records/:pk', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canEdit && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName, pk } = req.params;
  const data = req.body || {};
  // Silently drops body keys that are not valid identifiers.
  const columns = Object.keys(data).filter(isValidIdentifier);

  if (!columns.length) {
    return res.status(400).json({ success: false, error: 'No valid fields to update' });
  }

  try {
    const primaryKey = await getPrimaryKeyColumn(tableName) || 'id';
    const values = columns.map((column) => data[column]);
    const setClause = columns.map((col, i) => `${quoteIdentifier(col)} = $${i + 1}`).join(', ');
    const sql = `UPDATE ${quoteIdentifier(tableName)} SET ${setClause} WHERE ${quoteIdentifier(primaryKey)} = $${values.length + 1} RETURNING *`;
    const result = await pool.query(sql, [...values, pk]);
    // NOTE(review): result.rows[0] is undefined when no row matched pk —
    // callers receive { success: true, data: undefined }. Confirm intended.
    res.json({ success: true, data: result.rows[0] });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// DELETE /api/tables/:tableName/records/:pk — remove one row by primary key.
// Falls back to an "id" column when the table declares no primary key.
app.delete('/api/tables/:tableName/records/:pk', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canDelete && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName, pk } = req.params;

  try {
    const keyColumn = (await getPrimaryKeyColumn(tableName)) || 'id';
    const sql = `DELETE FROM ${quoteIdentifier(tableName)} WHERE ${quoteIdentifier(keyColumn)} = $1`;
    await pool.query(sql, [pk]);
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// POST /api/tables/:tableName/columns — add a column to an existing table.
// Column name and type are validated against whitelists; the default value is
// rendered through renderDefault so user input cannot inject SQL into the DDL.
app.post('/api/tables/:tableName/columns', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canEdit && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName } = req.params;
  const { name, type, nullable = true, defaultValue, primaryKey } = req.body || {};

  if (!isValidIdentifier(name) || !ALLOWED_SQL_TYPES.has(type)) {
    return res.status(400).json({ success: false, error: 'Invalid column definition' });
  }

  // SECURITY: previously defaultValue was interpolated raw into the ALTER
  // TABLE statement, allowing SQL injection. Plain numbers/booleans/NULL and
  // a small whitelist of Postgres functions pass through verbatim; anything
  // else is emitted as an escaped string literal.
  const renderDefault = (value) => {
    const raw = String(value).trim();
    if (/^-?\d+(\.\d+)?$/.test(raw)) return raw;
    if (/^(true|false|null|now\(\)|current_timestamp|current_date|gen_random_uuid\(\))$/i.test(raw)) return raw;
    return `'${raw.replace(/'/g, "''")}'`;
  };

  const parts = [`${quoteIdentifier(name)} ${type}`];
  if (primaryKey) parts.push('PRIMARY KEY');
  if (!nullable) parts.push('NOT NULL');
  if (defaultValue !== undefined && defaultValue !== null && defaultValue !== '') {
    parts.push(`DEFAULT ${renderDefault(defaultValue)}`);
  }

  try {
    await pool.query(`ALTER TABLE ${quoteIdentifier(tableName)} ADD COLUMN ${parts.join(' ')}`);
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// PUT /api/tables/:tableName/columns/:columnName — alter an existing column:
// optionally change its type, nullability, and/or default. Each provided
// property runs as its own ALTER statement; the default value is rendered
// through renderDefault so user input cannot inject SQL into the DDL.
app.put('/api/tables/:tableName/columns/:columnName', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canEdit && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName, columnName } = req.params;
  const { type, nullable, defaultValue } = req.body || {};

  if (!isValidIdentifier(columnName)) {
    return res.status(400).json({ success: false, error: 'Invalid column name' });
  }

  // SECURITY: previously defaultValue was interpolated raw into SET DEFAULT,
  // allowing SQL injection. Plain numbers/booleans/NULL and a small whitelist
  // of Postgres functions pass through verbatim; anything else is emitted as
  // an escaped string literal.
  const renderDefault = (value) => {
    const raw = String(value).trim();
    if (/^-?\d+(\.\d+)?$/.test(raw)) return raw;
    if (/^(true|false|null|now\(\)|current_timestamp|current_date|gen_random_uuid\(\))$/i.test(raw)) return raw;
    return `'${raw.replace(/'/g, "''")}'`;
  };

  try {
    if (type) {
      if (!ALLOWED_SQL_TYPES.has(type)) {
        return res.status(400).json({ success: false, error: 'Invalid column type' });
      }
      await pool.query(`ALTER TABLE ${quoteIdentifier(tableName)} ALTER COLUMN ${quoteIdentifier(columnName)} TYPE ${type}`);
    }

    if (typeof nullable === 'boolean') {
      const nullSql = nullable ? 'DROP NOT NULL' : 'SET NOT NULL';
      await pool.query(`ALTER TABLE ${quoteIdentifier(tableName)} ALTER COLUMN ${quoteIdentifier(columnName)} ${nullSql}`);
    }

    if (defaultValue !== undefined) {
      // null / empty string clears the default; anything else sets it.
      if (defaultValue === null || defaultValue === '') {
        await pool.query(`ALTER TABLE ${quoteIdentifier(tableName)} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`);
      } else {
        await pool.query(`ALTER TABLE ${quoteIdentifier(tableName)} ALTER COLUMN ${quoteIdentifier(columnName)} SET DEFAULT ${renderDefault(defaultValue)}`);
      }
    }

    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// DELETE /api/tables/:tableName/columns/:columnName — drop a column
// (idempotent via IF EXISTS). Requires table access plus canDelete.
app.delete('/api/tables/:tableName/columns/:columnName', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canDelete && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName, columnName } = req.params;

  if (!isValidIdentifier(columnName)) {
    return res.status(400).json({ success: false, error: 'Invalid column name' });
  }

  try {
    const sql = `ALTER TABLE ${quoteIdentifier(tableName)} DROP COLUMN IF EXISTS ${quoteIdentifier(columnName)}`;
    await pool.query(sql);
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// POST /api/query — execute arbitrary SQL submitted by the client.
// Deliberately unrestricted: the statement runs verbatim against the pool,
// gated only by the canRunSql permission. This route bypasses every table/
// folder access check, so canRunSql is effectively full DB access.
app.post('/api/query', requireAuth, requirePermission(
  (permissions) => permissions.canRunSql,
  'SQL access denied'
), async (req, res) => {
  const { sql } = req.body || {};

  if (!sql || typeof sql !== 'string') {
    return res.status(400).json({ success: false, error: 'SQL query is required' });
  }

  try {
    const result = await pool.query(sql);
    res.json({
      success: true,
      rows: result.rows,
      rowCount: result.rowCount,
      command: result.command
    });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// GET /api/tables/:tableName/indexes — list indexes for a table from
// pg_indexes, parsing the column list out of the index definition text.
app.get('/api/tables/:tableName/indexes', requireAuth, requireTableAccess, async (req, res) => {
  const { tableName } = req.params;

  try {
    const result = await pool.query(`
      SELECT
        indexname as name,
        indexdef as definition
      FROM pg_indexes
      WHERE tablename = $1
    `, [tableName]);

    const indexes = result.rows.map(row => ({
      name: row.name,
      // First parenthesized group of the CREATE INDEX text is the column list.
      columns: row.definition.match(/\((.*?)\)/)?.[1] || 'unknown',
      unique: row.definition.includes('UNIQUE'),
      // Hard-coded: index method is not extracted from the definition.
      type: 'btree'
    }));

    res.json(indexes);
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// POST /api/tables/:tableName/indexes — create a (possibly UNIQUE) index.
// Accepts columns as a comma-separated string; every piece must pass the
// identifier whitelist before being quoted into the DDL.
app.post('/api/tables/:tableName/indexes', requireAuth, requireTableAccess, requirePermission(
  (permissions, req) => permissions.canEdit && canAccessTable(permissions, req.params.tableName),
  'Access denied'
), async (req, res) => {
  const { tableName } = req.params;
  const { name, columns, unique } = req.body || {};

  if (!isValidIdentifier(name)) {
    return res.status(400).json({ success: false, error: 'Invalid index name' });
  }

  try {
    const uniqueStr = unique ? 'UNIQUE ' : '';
    // Split, trim, and drop empty segments from the comma-separated list.
    const columnList = String(columns || '').split(',').map((item) => item.trim()).filter(Boolean);
    if (!columnList.length || !columnList.every(isValidIdentifier)) {
      return res.status(400).json({ success: false, error: 'Invalid index columns' });
    }
    const sql = `CREATE ${uniqueStr}INDEX ${quoteIdentifier(name)} ON ${quoteIdentifier(tableName)} (${columnList.map(quoteIdentifier).join(', ')})`;
    await pool.query(sql);
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// DELETE /api/indexes/:indexName — drop an index by name (IF EXISTS).
// Index names are global (no table scope), so only canDelete is checked.
app.delete('/api/indexes/:indexName', requireAuth, requirePermission(
  (permissions) => permissions.canDelete,
  'Access denied'
), async (req, res) => {
  const { indexName } = req.params;

  if (!isValidIdentifier(indexName)) {
    return res.status(400).json({ success: false, error: 'Invalid index name' });
  }

  try {
    const sql = `DROP INDEX IF EXISTS ${quoteIdentifier(indexName)}`;
    await pool.query(sql);
    res.json({ success: true });
  } catch (err) {
    res.status(500).json({ success: false, error: err.message });
  }
});
|
||||
|
||||
// GET /api/containers — list all Docker containers. Gated by the log-viewing
// permission since this feeds the log-browser UI.
app.get('/api/containers', requireAuth, requirePermission(
  (permissions) => permissions.canViewLogs,
  'Logs access denied'
), async (req, res) => {
  try {
    const containers = await listContainers();
    res.json(containers);
  } catch (err) {
    const message = `Docker is unavailable: ${err.message}`;
    res.status(500).json({ success: false, error: message });
  }
});
|
||||
|
||||
// GET /api/containers/:name/logs — one-shot fetch of a container's recent
// stdout/stderr (timestamped), demultiplexed from Docker's framed log format.
app.get('/api/containers/:name/logs', requireAuth, requirePermission(
  (permissions) => permissions.canViewLogs,
  'Logs access denied'
), async (req, res) => {
  // Clamp tail to 20..1000 lines (default 200).
  const tail = Math.min(Math.max(parseInt(req.query.tail, 10) || 200, 20), 1000);

  try {
    const container = await resolveContainer(req.params.name);
    const body = await dockerRequest(`/containers/${container.id}/logs?stdout=1&stderr=1&tail=${tail}&timestamps=1`);
    res.json({
      success: true,
      container,
      logs: demuxDockerChunk(body),
    });
  } catch (err) {
    res.status(500).json({ success: false, error: `Failed to read container logs: ${err.message}` });
  }
});
|
||||
|
||||
// GET /api/containers/:name/logs/stream — follow a container's logs as
// Server-Sent Events. Emits: meta (once), log (per line), heartbeat (15s),
// end, and error events. Cleans up the heartbeat timer and Docker stream on
// every exit path, including client disconnect.
app.get('/api/containers/:name/logs/stream', requireAuth, requirePermission(
  (permissions) => permissions.canViewLogs,
  'Logs access denied'
), async (req, res) => {
  try {
    const container = await resolveContainer(req.params.name);
    const stream = await dockerRequest(`/containers/${container.id}/logs?stdout=1&stderr=1&follow=1&tail=50&timestamps=1`, { stream: true });

    // SSE headers; no-transform keeps proxies from buffering the stream.
    res.writeHead(200, {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache, no-transform',
      Connection: 'keep-alive',
    });

    res.write(`event: meta\ndata: ${JSON.stringify({ name: container.name, status: container.status })}\n\n`);

    // Periodic no-op event to keep intermediaries from closing the socket.
    const heartbeat = setInterval(() => {
      res.write('event: heartbeat\ndata: {}\n\n');
    }, 15000);

    stream.on('data', (chunk) => {
      // Strip Docker's multiplexing frames, then forward one SSE event per line.
      const text = demuxDockerChunk(chunk);
      if (!text) return;
      text.split(/\r?\n/).filter(Boolean).forEach((line) => {
        res.write(`event: log\ndata: ${JSON.stringify({ line })}\n\n`);
      });
    });

    stream.on('end', () => {
      clearInterval(heartbeat);
      res.write('event: end\ndata: {}\n\n');
      res.end();
    });

    stream.on('error', (error) => {
      clearInterval(heartbeat);
      res.write(`event: error\ndata: ${JSON.stringify({ message: error.message })}\n\n`);
      res.end();
    });

    // Client went away: stop the timer and tear down the Docker stream.
    req.on('close', () => {
      clearInterval(heartbeat);
      stream.destroy();
    });
  } catch (err) {
    res.status(500).json({ success: false, error: `Failed to stream logs: ${err.message}` });
  }
});
|
||||
|
||||
// Start server
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`🚀 Server running on http://localhost:${PORT}`);
  console.log('');
  console.log('🔑 Default login credentials:');
  console.log(`   Username: ${process.env.ADMIN_USERNAME || 'admin'}`);
  // SECURITY: never echo the real admin password to stdout — process logs are
  // routinely shipped to aggregators. Only the fallback default is shown.
  console.log(`   Password: ${process.env.ADMIN_PASSWORD ? '(set via ADMIN_PASSWORD)' : 'admin'}`);
  console.log('');
  console.log('📝 Make sure to configure your database in .env file');
});
|
||||
46
users.json
Normal file
46
users.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"users": [
|
||||
{
|
||||
"username": "platform_admin",
|
||||
"password": "admin123",
|
||||
"role": "admin",
|
||||
"folders": null,
|
||||
"disabled": false
|
||||
},
|
||||
{
|
||||
"username": "frontend_admin",
|
||||
"password": "frontend123",
|
||||
"role": "admin",
|
||||
"folders": ["frontend"],
|
||||
"disabled": false
|
||||
},
|
||||
{
|
||||
"username": "backend_admin",
|
||||
"password": "backend123",
|
||||
"role": "admin",
|
||||
"folders": ["backend"],
|
||||
"disabled": false
|
||||
},
|
||||
{
|
||||
"username": "frontend_moderator",
|
||||
"password": "moder123",
|
||||
"role": "moderator",
|
||||
"folders": ["frontend"],
|
||||
"disabled": false
|
||||
},
|
||||
{
|
||||
"username": "backend_moderator",
|
||||
"password": "moder123",
|
||||
"role": "moderator",
|
||||
"folders": ["backend"],
|
||||
"disabled": false
|
||||
},
|
||||
{
|
||||
"username": "viewer",
|
||||
"password": "viewer123",
|
||||
"role": "viewer",
|
||||
"folders": null,
|
||||
"disabled": false
|
||||
}
|
||||
]
|
||||
}
|
||||
Reference in New Issue
Block a user