initial commit
Some checks failed
CI / Run tests (push) Has been cancelled
CI / Docker build test (push) Has been cancelled
CI / Lint (ruff + mypy) (push) Has been cancelled

This commit is contained in:
2026-03-30 16:46:26 +07:00
commit 2a7dfa95c8
67 changed files with 5864 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
{
"permissions": {
"allow": [
"Bash(curl:*)",
"WebSearch",
"WebFetch(domain:glitchtip.com)",
"WebFetch(domain:github.com)",
"WebFetch(domain:raw.githubusercontent.com)",
"Bash(poetry install:*)"
]
}
}

138
.dockerignore Normal file
View File

@@ -0,0 +1,138 @@
# =============================================================================
# Git
# =============================================================================
.git
.gitea
.github
.gitlab
.gitlab-ci.yml
.gitattributes
.pre-commit-config.yaml
# =============================================================================
# Python virtual environments
# =============================================================================
.venv
venv
env
ENV
# =============================================================================
# Python cache
# =============================================================================
__pycache__/
*.py[cod]
*.pyo
*.pyd
*.so
# =============================================================================
# Python tooling
# =============================================================================
.mypy_cache/
.pytest_cache/
.ruff_cache/
.pytype/
.pyre/
.pyright/
# =============================================================================
# Testing / Coverage
# =============================================================================
.coverage
.coverage.*
htmlcov/
.tox/
.nox/
tests/
test/
coverage.xml
# =============================================================================
# Build artifacts
# =============================================================================
build/
dist/
.eggs/
*.egg-info/
pip-wheel-metadata/
# =============================================================================
# Logs
# =============================================================================
*.log
logs/
log/
# =============================================================================
# Node / Frontend
# =============================================================================
node_modules/
.next/
.nuxt/
out/
coverage/
*.tsbuildinfo
# =============================================================================
# IDE / Editor
# =============================================================================
.idea/
.vscode/
*.swp
*.swo
*~
.DS_Store
Thumbs.db
# =============================================================================
# Environment files
# =============================================================================
.env
.env.*
!.env.example
!.env.sample
# =============================================================================
# Databases
# =============================================================================
*.db
*.sqlite
*.sqlite3
# =============================================================================
# Secrets
# =============================================================================
*.pem
*.key
*.crt
*.p12
*.pfx
secrets/
# =============================================================================
# Temporary
# =============================================================================
tmp/
temp/
*.tmp
*.temp
.cache/
# =============================================================================
# Jupyter
# =============================================================================
.ipynb_checkpoints/
# =============================================================================
# ML artifacts
# =============================================================================
*.pt
*.pth
*.onnx
*.h5
*.ckpt
*.safetensors
*.npy
*.npz
*.parquet

65
.editorconfig Normal file
View File

@@ -0,0 +1,65 @@
root = true
# =============================================================================
# Global settings
# =============================================================================
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4
tab_width = 4
# =============================================================================
# Python
# =============================================================================
[*.py]
max_line_length = 88
# =============================================================================
# YAML (Docker, CI, compose)
# =============================================================================
[*.yml]
indent_size = 2
[*.yaml]
indent_size = 2
# =============================================================================
# JSON
# =============================================================================
[*.json]
indent_size = 2
# =============================================================================
# TOML (pyproject.toml, poetry)
# =============================================================================
[*.toml]
indent_size = 2
# =============================================================================
# Markdown
# =============================================================================
[*.md]
trim_trailing_whitespace = false
indent_size = 2
# =============================================================================
# Shell scripts
# =============================================================================
[*.sh]
indent_size = 2
# =============================================================================
# Makefile (tabs required)
# =============================================================================
[Makefile]
indent_style = tab
# =============================================================================
# INI / config files
# =============================================================================
[*.ini]
indent_size = 2

42
.env.example Normal file
View File

@@ -0,0 +1,42 @@
# Telegram
TELEGRAM_BOT_TOKEN=your-bot-token-here
TELEGRAM_GROUP_CHAT_ID=-1001234567890
TELEGRAM_ADMIN_IDS=
# Topic IDs
TELEGRAM_BACKEND_TOPIC_ID=1
TELEGRAM_FRONTEND_TOPIC_ID=2
TELEGRAM_DIGEST_TOPIC_ID=3
# Project ownership
BACKEND_PROJECTS=backend-production,backend-staging,backend-testing
FRONTEND_PROJECTS=frontend-production,frontend-staging,android-production,web-production
# Telegram DM subscribers
BACKEND_SUBSCRIBERS=
FRONTEND_SUBSCRIBERS=
# GlitchTip
GLITCHTIP_URL=https://glitchtip.example.com
GLITCHTIP_API_TOKEN=your-glitchtip-auth-token-here
GLITCHTIP_ORG_SLUG=your-org-slug
# Database
DATABASE_URL=postgresql+asyncpg://glitchup:glitchup@db:5432/glitchup
# Webhook API
API_PORT=8080
WEBHOOK_SECRET=
# Scheduler
DIGEST_CRON_DAY=mon
DIGEST_CRON_HOUR=10
DIGEST_CRON_MINUTE=0
DIGEST_TIMEZONE=Asia/Krasnoyarsk
SYNC_INTERVAL_MINUTES=30
# Alert filtering and control
ALERT_ENVIRONMENTS=production
DEDUP_WINDOW_HOURS=6
ALERT_RATE_LIMIT_COUNT=10
ALERT_RATE_LIMIT_WINDOW_MINUTES=15

83
.gitattributes vendored Normal file
View File

@@ -0,0 +1,83 @@
# =============================================================================
# Global text normalization
# =============================================================================
* text=auto eol=lf
# =============================================================================
# Shell scripts (must stay LF)
# =============================================================================
*.sh text eol=lf
*.bash text eol=lf
*.zsh text eol=lf
# =============================================================================
# Windows scripts
# =============================================================================
*.bat text eol=crlf
*.cmd text eol=crlf
*.ps1 text eol=crlf
# =============================================================================
# Binary images
# =============================================================================
*.png binary
*.jpg binary
*.jpeg binary
*.gif binary
*.bmp binary
*.webp binary
*.ico binary
# SVG is text
*.svg text
# =============================================================================
# Media
# =============================================================================
*.mp3 binary
*.wav binary
*.ogg binary
*.mp4 binary
*.mov binary
*.avi binary
*.mkv binary
# =============================================================================
# Fonts
# =============================================================================
*.eot binary
*.ttf binary
*.woff binary
*.woff2 binary
*.otf binary
# =============================================================================
# Documents
# =============================================================================
*.pdf binary
# =============================================================================
# WebAssembly
# =============================================================================
*.wasm binary
# =============================================================================
# Jupyter
# =============================================================================
*.ipynb binary
# =============================================================================
# Git LFS (ML / large artifacts)
# =============================================================================
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
# =============================================================================
# GitHub linguist hints
# =============================================================================
docs/** linguist-documentation
generated/** linguist-generated
vendor/** linguist-vendored

View File

@@ -0,0 +1,24 @@
---
name: Bug report
about: Сообщить об ошибке
title: "[BUG]"
labels: bug
---
## Описание
Опишите проблему.
## Как воспроизвести
1. ...
2. ...
3. ...
## Ожидаемое поведение
Что должно было произойти.
## Логи / скриншоты
Добавьте при необходимости.

View File

@@ -0,0 +1,16 @@
---
name: Feature request
about: Предложить новую функцию
title: "[FEATURE]"
labels: enhancement
---
## Описание функции
Опишите идею.
## Зачем это нужно
Какая проблема решается.
## Дополнительная информация

View File

@@ -0,0 +1,14 @@
## Описание
Что делает этот PR?
## Тип изменения
- [ ] bug fix
- [ ] новая функция
- [ ] рефакторинг
## Проверки
- [ ] тесты проходят
- [ ] код отформатирован

114
.gitea/workflows/ci.yml Normal file
View File

@@ -0,0 +1,114 @@
# ============================================================
# Continuous Integration (CI)
# Main verification pipeline for the project
# ============================================================
name: CI

# ------------------------------------------------------------
# Workflow triggers
# ------------------------------------------------------------
on:
  push:
    branches:
      - main
      - develop
  pull_request:

# ------------------------------------------------------------
# Global variables
# ------------------------------------------------------------
env:
  PYTHON_VERSION: "3.13"

# ============================================================
# JOB 1 — Code quality checks
# ============================================================
jobs:
  lint:
    name: Lint (ruff + mypy)
    runs-on: ubuntu-latest
    steps:
      # Clone the repository
      - name: Checkout repository
        uses: actions/checkout@v4

      # Install Python
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}

      # Install uv (fast dependency manager)
      - name: Install uv
        run: pip install uv

      # Cache downloaded dependency artifacts
      # NOTE(review): key uses hashFiles('uv.lock') — confirm uv.lock is
      # committed, otherwise the hash is empty and the cache never rotates.
      - name: Cache uv dependencies
        uses: actions/cache@v4
        with:
          path: ~/.cache/uv
          key: ${{ runner.os }}-uv-${{ hashFiles('uv.lock') }}

      # Install dependencies into the project virtualenv (.venv)
      - name: Install dependencies
        run: uv sync

      # Lint with Ruff.
      # `uv sync` installs tools into .venv, not globally, so they must be
      # invoked via `uv run` — a bare `ruff` is not on PATH in the runner.
      - name: Run Ruff
        run: uv run ruff check .

      # Static type checking
      - name: Run MyPy
        run: uv run mypy .

  # ============================================================
  # JOB 2 — Tests
  # ============================================================
  tests:
    name: Run tests
    runs-on: ubuntu-latest
    needs: lint
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Install uv
        run: pip install uv
      - name: Install dependencies
        run: uv sync
      # Run pytest through uv so the .venv interpreter and deps are used
      - name: Run tests
        run: uv run pytest

  # ============================================================
  # JOB 3 — Docker check
  # ============================================================
  docker:
    name: Docker build test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Verify the Dockerfile builds successfully
      - name: Build Docker image
        run: docker build .

View File

@@ -0,0 +1,33 @@
# ============================================================
# Auto format check
# ============================================================
name: Auto Format

on:
  pull_request:

jobs:
  format:
    name: Format code
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install formatter
        run: pip install ruff

      # Verify formatting rather than rewriting files: in CI a plain
      # `ruff format .` mutates the checkout and the result is discarded,
      # while the old follow-up step ran `ruff check .` (the LINTER), so
      # formatting was never actually enforced. `--check` fails the job
      # when any file needs reformatting.
      - name: Check formatting
        run: ruff format --check .

      # Lint pass (kept from the original pipeline)
      - name: Run linter
        run: ruff check .

View File

@@ -0,0 +1,36 @@
# ============================================================
# Security checks
# Dependency vulnerability scanning
# ============================================================
name: Security

on:
  schedule:
    - cron: "0 3 * * 1"   # every Monday at 03:00 UTC
  workflow_dispatch:        # allow manual runs

jobs:
  security-scan:
    name: Dependency security scan
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Install Python
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      # Install the audit tool
      - name: Install pip-audit
        run: pip install pip-audit

      # Install the project so its dependency tree is actually present:
      # with nothing installed, `pip-audit` audits a near-empty environment
      # and always passes, which defeats the purpose of this workflow.
      - name: Install project dependencies
        run: pip install .

      # Audit the installed environment for known vulnerabilities
      - name: Run security audit
        run: pip-audit

View File

@@ -0,0 +1,31 @@
# ============================================================
# Release workflow
# Automatic release publication on version tags
# ============================================================
name: Release

on:
  push:
    tags:
      - "v*"

# action-gh-release creates the release through the repository API and
# therefore needs write access to contents; without this the default
# read-only token makes the release step fail.
permissions:
  contents: write

jobs:
  release:
    name: Create release
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Install Python
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      # Create the release from the pushed tag.
      # NOTE(review): softprops/action-gh-release targets the GitHub API —
      # confirm it works on this Gitea instance (this lives under
      # .gitea/workflows) or replace with a Gitea-native release action.
      - name: Create GitHub Release
        uses: softprops/action-gh-release@v2

166
.gitignore vendored Normal file
View File

@@ -0,0 +1,166 @@
# =============================================================================
# OS
# =============================================================================
.DS_Store
Thumbs.db
Desktop.ini
# =============================================================================
# IDE / Editors
# =============================================================================
.idea/
.vscode/
*.swp
*.swo
*~
*.sublime-*
*.code-workspace
# =============================================================================
# Logs
# =============================================================================
*.log
*.logs
*.logs.*
*.log.*
logs/
log/
# =============================================================================
# Environment / Secrets
# =============================================================================
.env
.env.*
!.env.example
!.env.sample
!.env.template
# =============================================================================
# Security keys
# =============================================================================
*.pem
*.key
*.crt
*.p12
*.pfx
secrets/
# =============================================================================
# Python
# =============================================================================
__pycache__/
*.py[cod]
*$py.class
*.so
# Virtual environments
.venv/
venv/
env/
ENV/
# Packaging
build/
dist/
.eggs/
*.egg-info/
pip-wheel-metadata/
# Testing / coverage
.coverage
.coverage.*
htmlcov/
.tox/
.nox/
# Tool caches
.pytest_cache/
.mypy_cache/
.ruff_cache/
.pyre/
.pytype/
.pyright/
# Jupyter
.ipynb_checkpoints/
# =============================================================================
# Node / Frontend
# =============================================================================
node_modules/
.next/
.nuxt/
coverage/
*.tsbuildinfo
# =============================================================================
# Java / Kotlin
# =============================================================================
.gradle/
out/
*.class
# =============================================================================
# Go
# =============================================================================
bin/
*.test
# =============================================================================
# Rust
# =============================================================================
target/
# =============================================================================
# C / C++ / CMake
# =============================================================================
cmake-build-*/
CMakeFiles/
CMakeCache.txt
compile_commands.json
# =============================================================================
# Docker
# =============================================================================
docker-compose.override.yml
*.tar
# =============================================================================
# Databases
# =============================================================================
*.sqlite
*.sqlite3
*.db
# =============================================================================
# ML / Data artifacts
# =============================================================================
*.pt
*.pth
*.onnx
*.h5
*.ckpt
*.safetensors
*.npy
*.npz
*.parquet
*.joblib
*.pkl
*.pickle
# =============================================================================
# Archives
# =============================================================================
*.zip
*.tar.*
*.gz
*.7z
*.rar
# =============================================================================
# Temporary
# =============================================================================
tmp/
temp/
*.tmp
.cache/

55
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,55 @@
repos:
# =============================================================================
# Ruff (lint + import sorting + formatting)
# =============================================================================
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.4
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
# =============================================================================
# Base repository hygiene
# =============================================================================
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-yaml
args: [--allow-multiple-documents]
- id: check-json
- id: check-toml
- id: end-of-file-fixer
- id: trailing-whitespace
- id: check-merge-conflict
- id: detect-private-key
- id: check-added-large-files
- id: debug-statements
- id: check-executables-have-shebangs
- id: requirements-txt-fixer
# =============================================================================
# Static typing
# =============================================================================
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.10.0
hooks:
- id: mypy
args: [--ignore-missing-imports]
# =============================================================================
# Security checks
# =============================================================================
- repo: https://github.com/PyCQA/bandit
rev: 1.7.8
hooks:
- id: bandit
args: ["-r", "src"]
# =============================================================================
# Secret detection
# =============================================================================
- repo: https://github.com/Yelp/detect-secrets
rev: v1.5.0
hooks:
- id: detect-secrets

55
Dockerfile Normal file
View File

@@ -0,0 +1,55 @@
# syntax=docker/dockerfile:1.7

# =============================================================================
# Stage 1: builder — resolve and install dependencies with uv into /app/.venv
# =============================================================================
FROM python:3.13-slim AS builder

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    UV_LINK_MODE=copy

WORKDIR /app

# Bring in the uv/uvx binaries from the pinned official image
COPY --from=ghcr.io/astral-sh/uv:0.10.8 /uv /uvx /bin/

# Install third-party dependencies first, WITHOUT the project itself, so this
# expensive layer is cached until pyproject.toml changes. The uv download
# cache is mounted so rebuilds don't re-fetch wheels.
COPY pyproject.toml README.md ./
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --no-dev --no-install-project

# Now copy the application sources and install the project into the venv
COPY src ./src
COPY migrations ./migrations
COPY alembic.ini ./
COPY scripts ./scripts
COPY .env.example ./
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --no-dev

# =============================================================================
# Stage 2: runtime — slim image with only the venv and application files
# =============================================================================
FROM python:3.13-slim

# Put the venv on PATH so `python` / entrypoints resolve to /app/.venv;
# PYTHONPATH exposes the src/ layout to the interpreter.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PATH="/app/.venv/bin:$PATH" \
    PYTHONPATH="/app/src"

WORKDIR /app

# Run as a fixed-UID non-root user with no login shell
RUN groupadd -g 1000 app \
    && useradd -u 1000 -g app -m -s /usr/sbin/nologin app

# Copy the prebuilt virtualenv and application files from the builder
COPY --from=builder /app/.venv /app/.venv
COPY --chown=app:app src ./src
COPY --chown=app:app migrations ./migrations
COPY --chown=app:app alembic.ini ./
COPY --chown=app:app scripts ./scripts
COPY --chown=app:app .env.example ./
COPY --chown=app:app README.md ./
COPY --chown=app:app pyproject.toml ./

RUN chmod +x /app/scripts/start.sh /app/scripts/run_migration.sh \
    && chown -R app:app /app

USER app

# Probe the app's /health endpoint; the port follows API_PORT (default 8080),
# matching the port the application binds to inside the container.
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
    CMD python -c "import os, urllib.request; urllib.request.urlopen(f\"http://127.0.0.1:{os.getenv('API_PORT', '8080')}/health\")"

# start.sh runs migrations and launches the app (per README: alembic upgrade
# head, then FastAPI + Telegram polling + scheduler)
CMD ["/app/scripts/start.sh"]

277
README.md Normal file
View File

@@ -0,0 +1,277 @@
# GlitchUp Bot
Telegram-бот для GlitchTip, который:
- принимает webhook-алерты и отправляет их в Telegram;
- фильтрует окружения, делает dedup, mute по regex и мягкий rate limit;
- синхронизирует unresolved issues из GlitchTip API в локальный cache;
- строит digest, release summary и проектные сводки;
- поддерживает runtime-настройку ownership без рестарта через Telegram-команды;
- запускается локально через `uv` и в Docker через `docker compose`.
## Что сделано
В проекте уже реализовано:
- `POST /webhooks/glitchtip` для Slack-compatible payload из GlitchTip;
- Telegram polling с командами для просмотра сводок, релизов, sync-статуса и runtime-настроек;
- регулярный API sync в `issues_cache`;
- обновление `sync_state` для `api_sync` и `webhook`;
- release tracking через digest и команды `/releases`, `/release`;
- mute rules по regex с хранением в БД;
- rate limit для `P2`-алертов, при этом `P1` не режется;
- ownership overrides для project/topic/subscribers без рестарта;
- PostgreSQL + Alembic миграции;
- Docker-first запуск с авто-`alembic upgrade head`.
## Быстрый старт
### 1. Подготовить `.env`
Скопировать шаблон:
```bash
cp .env.example .env
```
Заполнить минимум:
```env
TELEGRAM_BOT_TOKEN=your-bot-token
TELEGRAM_GROUP_CHAT_ID=-1001234567890
GLITCHTIP_URL=https://glitchtip.example.com
GLITCHTIP_API_TOKEN=your-token
GLITCHTIP_ORG_SLUG=your-org
DATABASE_URL=postgresql+asyncpg://glitchup:glitchup@db:5432/glitchup
```
Если хочешь ограничить админ-команды, укажи:
```env
TELEGRAM_ADMIN_IDS=123456789,987654321
```
Если список пустой, команды изменения конфигурации будут доступны всем.
### 2. Локальный запуск через `uv`
```bash
uv sync --dev
uv run alembic upgrade head
uv run glitchup-bot
```
Полезные команды разработки:
```bash
uv run pytest
uv run ruff check
uv run ruff format --check
uv run python -m glitchup_bot
```
### 3. Запуск через Docker
```bash
docker compose up --build
```
Что произойдёт:
- поднимется PostgreSQL;
- контейнер приложения выполнит `alembic upgrade head`;
- затем стартуют FastAPI, Telegram polling и scheduler.
Webhook endpoint:
```text
http://<host>:8080/webhooks/glitchtip
```
Healthcheck:
```text
http://<host>:8080/health
```
## Как пользоваться ботом
### Главное изменение интерфейса
Теперь ботом можно пользоваться не только командами, но и через inline-кнопки в Telegram:
- `/help` открывает экран поддержки и быстрых действий;
- `/admin` открывает админ-панель с основными управляющими действиями;
- для частых сценариев больше не нужно помнить все команды вручную.
### Обычный сценарий
1. Добавить бота в Telegram-группу.
2. Включить forum/topics режим в группе.
3. Создать топики:
backend alerts, frontend alerts, digest.
4. Узнать их `topic_id` и записать в `.env`.
5. Добавить webhook в GlitchTip на `POST /webhooks/glitchtip`.
6. При необходимости задать `WEBHOOK_SECRET` и тот же заголовок в GlitchTip.
7. После запуска бот начнёт:
- слать `P1/P2` алерты в Telegram;
- синхронизировать issues из API;
- строить digest и отвечать на команды.
8. Открой `/help`, чтобы пользоваться ботом через кнопки.
### Что делает бот в работе
- `P1`:
критичный production alert, отправляется сразу.
- `P2`:
production alert без критического цвета, отправляется сразу, но может быть ограничен rate limit.
- `P3`:
не шлётся realtime, остаётся в sync/digest-данных.
- Uptime alert:
идёт как `P1`.
### Основные команды
- `/help` — экран помощи, поддержки и быстрых кнопок.
- `/week` — digest за неделю.
- `/today` — новые issues за сегодня.
- `/project <slug>` — сводка по одному проекту.
- `/top` — самые шумные unresolved issues.
- `/stale` — самые старые unresolved issues.
- `/releases` — список релизов с незакрытыми issues.
- `/release <version>` — детали по конкретному релизу.
- `/sync_status` — время последней синхронизации.
- `/subscribe <backend|frontend>` — добавить себе runtime DM-подписку.
- `/unsubscribe <backend|frontend>` — убрать свою runtime DM-подписку.
### Админ-команды
- `/admin` — кнопочная админ-панель.
- `/sync` — принудительный sync cache.
- `/ownership` — показать текущие overrides.
- `/owner <slug> <backend|frontend>` — привязать проект к группе.
- `/owner_reset <slug>` — удалить runtime override проекта.
- `/topic <backend|frontend|digest> <topic_id>` — переопределить topic.
- `/topic_reset <backend|frontend|digest>` — снять topic override.
- `/mute_add <regex>` — добавить regex mute rule.
- `/mute_list` — показать mute rules.
- `/mute_del <id>` — удалить mute rule.
## Конфигурация
Основные переменные окружения:
```env
TELEGRAM_BOT_TOKEN=your-bot-token-here
TELEGRAM_GROUP_CHAT_ID=-1001234567890
TELEGRAM_ADMIN_IDS=123456789
TELEGRAM_BACKEND_TOPIC_ID=1
TELEGRAM_FRONTEND_TOPIC_ID=2
TELEGRAM_DIGEST_TOPIC_ID=3
BACKEND_PROJECTS=backend-production,backend-staging
FRONTEND_PROJECTS=frontend-production,frontend-staging
BACKEND_SUBSCRIBERS=123456789,987654321
FRONTEND_SUBSCRIBERS=
GLITCHTIP_URL=https://glitchtip.example.com
GLITCHTIP_API_TOKEN=your-token
GLITCHTIP_ORG_SLUG=your-org
DATABASE_URL=postgresql+asyncpg://glitchup:glitchup@db:5432/glitchup
API_PORT=8080
WEBHOOK_SECRET=
DIGEST_CRON_DAY=mon
DIGEST_CRON_HOUR=10
DIGEST_CRON_MINUTE=0
DIGEST_TIMEZONE=Asia/Krasnoyarsk
SYNC_INTERVAL_MINUTES=30
ALERT_ENVIRONMENTS=production
DEDUP_WINDOW_HOURS=6
ALERT_RATE_LIMIT_COUNT=10
ALERT_RATE_LIMIT_WINDOW_MINUTES=15
```
## Как это работает
### Webhook-контур
Для обычных issue-alert:
1. проверяется `X-Webhook-Secret`, если задан `WEBHOOK_SECRET`;
2. из attachment извлекается `Project`;
3. алерт фильтруется по `ALERT_ENVIRONMENTS`;
4. применяется dedup по fingerprint `project:title`;
5. `production + #e52b50` считается `P1`, остальное в production идёт как `P2`;
6. перед отправкой применяются mute rules;
7. для `P2` применяется rate limit по группе;
8. результат записывается в `notifications_sent`.
Для uptime-alert:
- сразу отправляется как `P1`;
- идёт в Telegram без обычного issue dedup.
### API sync
Sync:
- ходит в GlitchTip по всем проектам из конфигурации;
- upsert-ит unresolved issues в `issues_cache`;
- помечает пропавшие из unresolved issues как `resolved`;
- сохраняет release, regressions, event count, culprit и ссылки;
- обновляет `sync_state` для `api_sync`.
### Release tracking
- digest показывает блок "После релизов";
- `/releases` агрегирует issues по release;
- `/release <version>` показывает детали по одному релизу.
## Таблицы
Основные:
- `issues_cache` — кэш unresolved/resolved issues из API;
- `notifications_sent` — история alert/digest/uptime и статусы отправки;
- `sync_state` — состояние `api_sync` и `webhook`.
Runtime-настройки:
- `project_ownership_overrides` — project → group overrides;
- `group_topic_overrides` — topic overrides;
- `group_subscriber_overrides` — runtime subscriber overrides;
- `mute_rules` — regex mute rules.
## Тесты
```bash
uv run pytest
```
Покрыты базовые сценарии для:
- настроек;
- webhook endpoint;
- alert processor;
- digest builder;
- Telegram sender.
## Идеи и недоделки
Сейчас бот уже рабочий, но в этот блок вынесены идеи для следующих итераций:
- полноценный wizard-интерфейс в Telegram для изменения параметров без ручного ввода команд;
- отдельные команды для управления env-based подписчиками, а не только runtime overrides;
- более умный rate limit с burst/recovery-политикой и отдельной статистикой suppress-событий;
- mute rules с scope по проекту или группе, а не только глобальные regex;
- richer release analytics: сравнение "до/после релиза", отдельные regression-отчёты;
- отдельная admin-аутентификация вместо режима "если `TELEGRAM_ADMIN_IDS` пустой, можно всем";
- export/backup runtime-настроек ownership и mute rules;
- e2e-тесты с реальной БД и контейнерным прогоном;
- дополнительные команды для ручной диагностики sync и очередей уведомлений.

36
alembic.ini Normal file
View File

@@ -0,0 +1,36 @@
[alembic]
script_location = migrations
sqlalchemy.url = driver://user:pass@localhost/dbname
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

71
cliff.toml Normal file
View File

@@ -0,0 +1,71 @@
# ============================================================
# GIT CLIFF CONFIGURATION
# ============================================================
[changelog]
header = """
# Changelog
All notable changes to this project will be documented in this file.
The format is based on:
https://keepachangelog.com
https://semver.org
"""
body = """
{% if version %}
## {{ version }} — {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}
## Unreleased
{% endif %}
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group }}
{% for commit in commits %}
- {{ commit.message | upper_first }} ({{ commit.id | truncate(length=7, end="") }})
{% endfor %}
{% endfor %}
"""
footer = """
---
Generated automatically by git-cliff
"""
trim = true
[git]
conventional_commits = true
filter_unconventional = true
split_commits = false
sort_commits = "oldest"
commit_parsers = [
{ message = ".*!", group = "💥 Breaking Changes" },
{ message = "^feat(\\(.+\\))?:", group = "🚀 Features" },
{ message = "^fix(\\(.+\\))?:", group = "🐛 Bug Fixes" },
{ message = "^perf(\\(.+\\))?:", group = "⚡ Performance" },
{ message = "^refactor(\\(.+\\))?:", group = "♻️ Refactoring" },
{ message = "^docs(\\(.+\\))?:", group = "📚 Documentation" },
{ message = "^style(\\(.+\\))?:", group = "🎨 Styling" },
{ message = "^test(\\(.+\\))?:", group = "🧪 Tests" },
{ message = "^build(\\(.+\\))?:", group = "📦 Build System" },
{ message = "^ci(\\(.+\\))?:", group = "⚙️ CI" },
{ message = "^chore(\\(.+\\))?:", group = "🔧 Maintenance" }
]
tag_pattern = "v[0-9]*"
[remote.github]
owner = "your-user"
repo = "your-repo"

28
docker-compose.yml Normal file
View File

@@ -0,0 +1,28 @@
services:
  bot:
    build: .
    restart: unless-stopped
    env_file: .env
    ports:
      # The app binds to API_PORT *inside* the container too (the Dockerfile
      # healthcheck probes os.getenv('API_PORT', '8080')), so the container
      # side of the mapping must follow API_PORT as well — the previous fixed
      # ":8080" target broke the mapping whenever API_PORT was not 8080.
      - "${API_PORT:-8080}:${API_PORT:-8080}"
    depends_on:
      db:
        condition: service_healthy

  db:
    image: postgres:17-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: glitchup
      POSTGRES_PASSWORD: glitchup
      POSTGRES_DB: glitchup
    volumes:
      - pgdata:/var/lib/postgresql/data
    healthcheck:
      # Wait until PostgreSQL accepts connections before starting the bot
      test: ["CMD-SHELL", "pg_isready -U glitchup"]
      interval: 5s
      timeout: 3s
      retries: 5

volumes:
  pgdata:

57
migrations/env.py Normal file
View File

@@ -0,0 +1,57 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config
from glitchup_bot.config import settings
from glitchup_bot.models import Base

# Alembic Config object; exposes values from alembic.ini.
config = context.config
# Wire up Python logging as configured in the ini file, when present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# The database URL always comes from application settings so migrations and
# the running app share a single source of truth.
config.set_main_option("sqlalchemy.url", settings.database_url)
# Model metadata used by `alembic revision --autogenerate`.
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Render migration SQL without opening a database connection."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection) -> None:
    """Run migrations on an already-established connection (sync context)."""
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """Create an async engine, run migrations via run_sync, then dispose it."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    """Online-mode entry point: drives the async migration runner."""
    asyncio.run(run_async_migrations())


# Alembic invokes this module directly; pick the mode it requested.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
migrations/script.py.mako Normal file
View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

View File

@@ -0,0 +1,103 @@
"""initial tables
Revision ID: 20260327_0001
Revises:
Create Date: 2026-03-27 00:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "20260327_0001"
down_revision: str | None = None
branch_labels: Sequence[str] | None = None
depends_on: Sequence[str] | None = None
def upgrade() -> None:
    """Create the initial tables: issue cache, notification ledger, sync state."""
    # Local mirror of GlitchTip issues, used to build digests and summaries.
    op.create_table(
        "issues_cache",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("glitchtip_issue_id", sa.BigInteger(), nullable=False),
        sa.Column("project_slug", sa.String(length=255), nullable=False),
        sa.Column("title", sa.Text(), nullable=False),
        sa.Column("culprit", sa.Text(), nullable=True),
        sa.Column("level", sa.String(length=50), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("first_seen", sa.DateTime(timezone=True), nullable=True),
        sa.Column("last_seen", sa.DateTime(timezone=True), nullable=True),
        sa.Column("event_count", sa.Integer(), nullable=False),
        sa.Column("is_regression", sa.Boolean(), nullable=False),
        sa.Column("link", sa.Text(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
    )
    # Unique: at most one cache row per upstream GlitchTip issue.
    op.create_index(
        op.f("ix_issues_cache_glitchtip_issue_id"),
        "issues_cache",
        ["glitchtip_issue_id"],
        unique=True,
    )
    op.create_index(
        op.f("ix_issues_cache_project_slug"), "issues_cache", ["project_slug"], unique=False
    )
    # Ledger of notifications already sent, keyed by fingerprint for dedup.
    op.create_table(
        "notifications_sent",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("issue_id", sa.BigInteger(), nullable=False),
        sa.Column("notification_type", sa.String(length=50), nullable=False),
        sa.Column("fingerprint", sa.String(length=255), nullable=False),
        sa.Column(
            "sent_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False
        ),
    )
    op.create_index(
        op.f("ix_notifications_sent_fingerprint"),
        "notifications_sent",
        ["fingerprint"],
        unique=False,
    )
    op.create_index(
        op.f("ix_notifications_sent_issue_id"), "notifications_sent", ["issue_id"], unique=False
    )
    # Records when each sync source last completed successfully.
    op.create_table(
        "sync_state",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("source", sa.String(length=100), nullable=False),
        sa.Column("last_successful_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
    )
    op.create_index(op.f("ix_sync_state_source"), "sync_state", ["source"], unique=True)


def downgrade() -> None:
    """Drop everything created by upgrade(), indexes before their tables."""
    op.drop_index(op.f("ix_sync_state_source"), table_name="sync_state")
    op.drop_table("sync_state")
    op.drop_index(op.f("ix_notifications_sent_issue_id"), table_name="notifications_sent")
    op.drop_index(op.f("ix_notifications_sent_fingerprint"), table_name="notifications_sent")
    op.drop_table("notifications_sent")
    op.drop_index(op.f("ix_issues_cache_project_slug"), table_name="issues_cache")
    op.drop_index(op.f("ix_issues_cache_glitchtip_issue_id"), table_name="issues_cache")
    op.drop_table("issues_cache")

View File

@@ -0,0 +1,162 @@
"""runtime features
Revision ID: 20260327_0002
Revises: 20260327_0001
Create Date: 2026-03-27 00:30:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "20260327_0002"
down_revision: str | None = "20260327_0001"
branch_labels: Sequence[str] | None = None
depends_on: Sequence[str] | None = None
def upgrade() -> None:
    """Add runtime-configuration tables and alert-routing columns."""
    # Track the release an issue is associated with.
    op.add_column("issues_cache", sa.Column("release", sa.String(length=255), nullable=True))
    # Extra delivery metadata on the notification ledger (nullable for old rows).
    op.add_column(
        "notifications_sent", sa.Column("project_slug", sa.String(length=255), nullable=True)
    )
    op.add_column(
        "notifications_sent", sa.Column("group_name", sa.String(length=50), nullable=True)
    )
    op.add_column("notifications_sent", sa.Column("priority", sa.String(length=20), nullable=True))
    op.add_column(
        "notifications_sent",
        sa.Column("delivery_status", sa.String(length=50), nullable=False, server_default="sent"),
    )
    op.create_index(
        op.f("ix_notifications_sent_group_name"),
        "notifications_sent",
        ["group_name"],
        unique=False,
    )
    # Pattern-based suppression rules managed at runtime via bot commands.
    op.create_table(
        "mute_rules",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("pattern", sa.Text(), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.text("true")),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
    )
    op.create_unique_constraint("uq_mute_rules_pattern", "mute_rules", ["pattern"])
    # Per-project routing override: project slug -> ownership group.
    op.create_table(
        "project_ownership_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("project_slug", sa.String(length=255), nullable=False),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
    )
    op.create_index(
        op.f("ix_project_ownership_overrides_project_slug"),
        "project_ownership_overrides",
        ["project_slug"],
        unique=True,
    )
    # Telegram topic override per ownership group (one topic per group).
    op.create_table(
        "group_topic_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        sa.Column("topic_id", sa.Integer(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
    )
    op.create_index(
        op.f("ix_group_topic_overrides_group_name"),
        "group_topic_overrides",
        ["group_name"],
        unique=True,
    )
    # Per-user DM subscription overrides; unique per (group, user) pair.
    op.create_table(
        "group_subscriber_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.UniqueConstraint("group_name", "user_id", name="uq_group_subscriber"),
    )
    op.create_index(
        op.f("ix_group_subscriber_overrides_group_name"),
        "group_subscriber_overrides",
        ["group_name"],
        unique=False,
    )
    op.create_index(
        op.f("ix_group_subscriber_overrides_user_id"),
        "group_subscriber_overrides",
        ["user_id"],
        unique=False,
    )


def downgrade() -> None:
    """Reverse upgrade(): drop new tables and columns in dependency order."""
    op.drop_index(
        op.f("ix_group_subscriber_overrides_user_id"),
        table_name="group_subscriber_overrides",
    )
    op.drop_index(
        op.f("ix_group_subscriber_overrides_group_name"),
        table_name="group_subscriber_overrides",
    )
    op.drop_table("group_subscriber_overrides")
    op.drop_index(op.f("ix_group_topic_overrides_group_name"), table_name="group_topic_overrides")
    op.drop_table("group_topic_overrides")
    op.drop_index(
        op.f("ix_project_ownership_overrides_project_slug"),
        table_name="project_ownership_overrides",
    )
    op.drop_table("project_ownership_overrides")
    op.drop_constraint("uq_mute_rules_pattern", "mute_rules", type_="unique")
    op.drop_table("mute_rules")
    op.drop_index(op.f("ix_notifications_sent_group_name"), table_name="notifications_sent")
    op.drop_column("notifications_sent", "delivery_status")
    op.drop_column("notifications_sent", "priority")
    op.drop_column("notifications_sent", "group_name")
    op.drop_column("notifications_sent", "project_slug")
    op.drop_column("issues_cache", "release")

88
pyproject.toml Normal file
View File

@@ -0,0 +1,88 @@
[build-system]
requires = ["hatchling>=1.27.0"]
build-backend = "hatchling.build"

[project]
name = "glitchup-bot"
version = "0.1.0"
description = "Telegram bot for GlitchTip webhook alerts and digest summaries."
readme = "README.md"
requires-python = ">=3.13,<3.15"
license = { text = "MIT" }
authors = [
{ name = "GlitchUp Bot contributors" }
]
dependencies = [
"aiogram>=3.16,<4.0",
"alembic>=1.14,<2.0",
"apscheduler>=3.11,<4.0",
"asyncpg>=0.30,<1.0",
"fastapi>=0.115,<1.0",
"httpx>=0.28,<1.0",
"pydantic-settings>=2.7,<3.0",
"sqlalchemy>=2.0,<3.0",
"uvicorn>=0.34,<1.0",
]

# PEP 735 dependency groups (supported by uv/pip>=25).
# NOTE(review): poetry does not read [dependency-groups] — confirm the
# intended installer, since scripts/run_migration.sh uses `uv run`.
[dependency-groups]
dev = [
"mypy>=1.10,<2.0",
"pytest>=8.3,<9.0",
"pytest-asyncio>=0.25,<1.0",
"ruff>=0.9,<1.0",
]

[project.scripts]
glitchup-bot = "glitchup_bot.main:run"

[tool.hatch.build.targets.wheel]
packages = ["src/glitchup_bot"]

[tool.pytest.ini_options]
minversion = "7.0"
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = [
"-ra",
"-vv",
"--strict-markers",
"--strict-config",
"--tb=short",
# Stop the whole run on the first failure.
"--maxfail=1",
"--durations=10",
]
asyncio_mode = "strict"
log_cli = true
log_cli_level = "INFO"
markers = [
"slow: slow tests",
"integration: integration tests",
"e2e: end-to-end tests",
]
# NOTE(review): blanket-ignoring DeprecationWarning can hide upgrade work;
# consider narrowing to specific modules.
filterwarnings = [
"ignore::DeprecationWarning",
]

[tool.ruff]
line-length = 100
target-version = "py313"
src = ["src", "tests"]

[tool.ruff.lint]
select = ["E", "F", "I", "B", "UP"]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"

[tool.mypy]
python_version = "3.13"
plugins = []
files = ["src"]
ignore_missing_imports = true
strict_optional = true
warn_unused_ignores = true
warn_redundant_casts = true
warn_unused_configs = true

5
scripts/run_migration.sh Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/sh
# Apply all pending Alembic migrations inside the container.
# NOTE(review): this script uses `uv run alembic` while scripts/start.sh calls
# `alembic` directly — confirm both entry points exist in the runtime image.
set -eu
cd /app
exec uv run alembic upgrade head

6
scripts/start.sh Normal file
View File

@@ -0,0 +1,6 @@
#!/bin/sh
# Container entry point: migrate the schema, then start the bot.
set -eu
cd /app
# Run migrations first so the app never starts against a stale schema.
alembic upgrade head
# exec replaces the shell so the bot receives signals (SIGTERM) directly.
exec glitchup-bot

View File

@@ -0,0 +1 @@
# Package version; keep in sync with [project].version in pyproject.toml.
__version__ = "0.1.0"

View File

@@ -0,0 +1,3 @@
from glitchup_bot.main import run

# Allow `python -m glitchup_bot` to start the bot. The guard keeps the module
# importable (e.g. by tooling or tests) without triggering the run as a side
# effect of import, which the original unconditional call did.
if __name__ == "__main__":
    run()

View File

View File

@@ -0,0 +1,17 @@
from fastapi import FastAPI
from glitchup_bot.api.webhook import router as webhook_router
def create_app() -> FastAPI:
    """Build the FastAPI application with webhook routes and a health probe."""
    api = FastAPI(title="GlitchUp Bot", version="0.1.0")
    api.include_router(webhook_router)

    @api.get("/health")
    async def health() -> dict[str, str]:
        # Liveness endpoint for container orchestration / load balancers.
        return {"status": "ok"}

    return api


# Module-level ASGI app, e.g. for `uvicorn glitchup_bot.api.app:app`.
app = create_app()

View File

@@ -0,0 +1,25 @@
from pydantic import BaseModel, Field
class WebhookField(BaseModel):
    """One short key/value entry inside a webhook attachment."""

    title: str
    value: str
    short: bool = False


class WebhookAttachment(BaseModel):
    """Slack-compatible attachment as emitted by GlitchTip webhooks."""

    # Allow constructing by field name as well as by wire alias.
    model_config = {"populate_by_name": True}

    title: str
    title_link: str | None = None
    text: str | None = None
    image_url: str | None = None
    color: str | None = None
    fields: list[WebhookField] | None = None
    # BUG FIX: the Slack-format wire key is "mrkdwn_in"; the previous bare
    # field name "mrkdown_in" never matched it, so the value was silently
    # dropped during validation. The attribute name is kept for backward
    # compatibility and the wire key is mapped via an alias.
    mrkdown_in: list[str] | None = Field(default=None, alias="mrkdwn_in")


class GlitchTipWebhookPayload(BaseModel):
    """Top-level GlitchTip webhook body (Slack-compatible format)."""

    text: str
    attachments: list[WebhookAttachment] = Field(default_factory=list)

    def is_uptime_alert(self) -> bool:
        """Heuristic: any payload whose text mentions 'uptime' is an uptime alert."""
        return "uptime" in self.text.lower()

View File

@@ -0,0 +1,25 @@
import hmac
import logging

from fastapi import APIRouter, Header, HTTPException, Request

from glitchup_bot.config import settings
from glitchup_bot.services.alert_processor import process_webhook_payload
router = APIRouter(prefix="/webhooks", tags=["webhooks"])
logger = logging.getLogger(__name__)
@router.post("/glitchtip")
async def glitchtip_webhook(
    request: Request,
    x_webhook_secret: str | None = Header(None),
) -> dict[str, str]:
    """Receive a GlitchTip webhook, authenticate it, and hand it off.

    Raises HTTP 403 when a secret is configured and the X-Webhook-Secret
    header does not match, and HTTP 400 when the body is not a JSON object.
    """
    # hmac.compare_digest: constant-time comparison so the secret cannot be
    # probed via response-timing differences (the plain != check could).
    if settings.webhook_secret and not hmac.compare_digest(
        x_webhook_secret or "", settings.webhook_secret
    ):
        raise HTTPException(status_code=403, detail="Invalid webhook secret")
    payload = await request.json()
    # Guard non-object bodies: `.get` on a list/str would raise and surface
    # as an opaque 500 instead of a client error.
    if not isinstance(payload, dict):
        raise HTTPException(status_code=400, detail="Invalid payload")
    logger.info("Received GlitchTip webhook: %s", payload.get("text", "unknown"))
    await process_webhook_payload(payload)
    return {"status": "accepted"}

View File

View File

@@ -0,0 +1,34 @@
from aiogram import Bot, Dispatcher
from aiogram.client.default import DefaultBotProperties
from glitchup_bot.bot.handlers.commands import router as commands_router
from glitchup_bot.config import settings
# Module-level dispatcher with all command routers attached at import time.
dp = Dispatcher()
dp.include_router(commands_router)

# Lazily-created singleton Bot instance (see get_bot/close_bot).
bot: Bot | None = None


def get_dispatcher() -> Dispatcher:
    """Return the shared dispatcher instance."""
    return dp


def get_bot() -> Bot:
    """Return the singleton Bot, creating it on first use."""
    global bot
    if bot is not None:
        return bot
    bot = Bot(
        token=settings.telegram_bot_token,
        default=DefaultBotProperties(parse_mode="HTML"),
    )
    return bot


async def close_bot() -> None:
    """Close the bot's HTTP session and reset the singleton."""
    global bot
    if bot is None:
        return
    await bot.session.close()
    bot = None

View File

@@ -0,0 +1,618 @@
import logging
from collections.abc import Awaitable, Callable
from html import escape
from aiogram import F, Router
from aiogram.filters import Command
from aiogram.types import CallbackQuery, Message
from glitchup_bot.bot.keyboards import admin_menu_keyboard, help_menu_keyboard
from glitchup_bot.config import settings
from glitchup_bot.services.digest_builder import (
build_digest,
build_project_summary,
build_release_detail,
build_release_summary,
build_stale_issues,
build_sync_status,
build_today_summary,
build_top_issues,
run_manual_sync,
)
from glitchup_bot.services.mute_rules import add_rule, list_rules, remove_rule
from glitchup_bot.services.routing import (
add_subscriber,
clear_project_group,
clear_topic_override,
list_project_overrides,
list_subscriber_overrides,
list_topic_overrides,
remove_subscriber,
resolve_subscribers,
resolve_topic_id,
set_project_group,
set_topic_override,
)
router = Router()
logger = logging.getLogger(__name__)
def _sender_id(message: Message) -> int | None:
    """Telegram user id of the message author, if present."""
    user = message.from_user
    return None if user is None else user.id


def _callback_sender_id(callback: CallbackQuery) -> int | None:
    """Telegram user id of the callback initiator, if present."""
    user = callback.from_user
    return None if user is None else user.id


def _is_admin_user(user_id: int | None) -> bool:
    """True when the id belongs to a configured bot administrator."""
    return settings.is_admin(user_id)


async def _require_admin(message: Message) -> bool:
    """Reply with a refusal and return False unless the sender is an admin."""
    if not _is_admin_user(_sender_id(message)):
        await message.answer("Команда доступна только администраторам.")
        return False
    return True


async def _require_admin_callback(callback: CallbackQuery) -> bool:
    """Show an alert and return False unless the callback sender is an admin."""
    if not _is_admin_user(_callback_sender_id(callback)):
        await callback.answer("Только для администраторов", show_alert=True)
        return False
    return True
def _help_text(is_admin: bool) -> str:
lines = [
"<b>GlitchUp Bot Help</b>",
"",
"Быстрые действия доступны кнопками ниже.",
"",
"<b>Пользовательские команды:</b>",
"• /week — digest за неделю",
"• /today — новые issues за сегодня",
"• /project &lt;slug&gt; — сводка по проекту",
"• /top — самые шумные issues",
"• /stale — старые незакрытые issues",
"• /releases — список релизов с issues",
"• /release &lt;version&gt; — детали по релизу",
"• /sync_status — статус последней синхронизации",
"• /subscribe &lt;backend|frontend&gt; — подписка на DM",
"• /unsubscribe &lt;backend|frontend&gt; — отписка от DM",
]
if is_admin:
lines.extend(
[
"",
"<b>Админ-команды:</b>",
"• /admin — открыть панель управления",
"• /sync — принудительный sync",
"• /ownership — показать overrides",
"• /owner &lt;slug&gt; &lt;backend|frontend&gt;",
"• /owner_reset &lt;slug&gt;",
"• /topic &lt;backend|frontend|digest&gt; &lt;topic_id&gt;",
"• /topic_reset &lt;backend|frontend|digest&gt;",
"• /mute_add &lt;regex&gt;",
"• /mute_list",
"• /mute_del &lt;id&gt;",
]
)
lines.extend(
[
"",
"<b>Как пользоваться:</b>",
"1. Открой /help и выбери нужный раздел кнопками.",
"2. Для ежедневной работы достаточно кнопок digest/today/top/stale/releases.",
"3. Для администрирования используй /admin.",
]
)
return "\n".join(lines)
def _admin_text() -> str:
return "\n".join(
[
"<b>Админ-панель GlitchUp Bot</b>",
"",
"Основные действия доступны кнопками ниже.",
"",
"<b>Быстрые действия:</b>",
"• Запустить sync",
"• Посмотреть sync status",
"• Посмотреть ownership и mute rules",
"• Открыть основные сводки",
"",
"<b>Команды настройки:</b>",
"• /owner &lt;slug&gt; &lt;backend|frontend&gt;",
"• /owner_reset &lt;slug&gt;",
"• /topic &lt;backend|frontend|digest&gt; &lt;topic_id&gt;",
"• /topic_reset &lt;backend|frontend|digest&gt;",
"• /mute_add &lt;regex&gt;",
"• /mute_del &lt;id&gt;",
]
)
async def _answer_text(
    target: Message | CallbackQuery,
    text: str,
    *,
    reply_markup=None,
    disable_web_page_preview: bool = True,
) -> None:
    """Send `text` to the originating chat for either a message or a callback."""
    # For callbacks, reply into the chat of the message the keyboard is on.
    message = target.message if isinstance(target, CallbackQuery) else target
    await message.answer(
        text,
        reply_markup=reply_markup,
        disable_web_page_preview=disable_web_page_preview,
    )
async def _handle_subscription_action(
    target: Message | CallbackQuery,
    group_name: str,
    action: str,
    user_id: int | None,
) -> None:
    """Subscribe or unsubscribe `user_id` to DM alerts for `group_name`."""
    if user_id is None:
        await _answer_text(target, "Не удалось определить пользователя.")
        return
    if action == "subscribe":
        await add_subscriber(group_name, user_id)
        await _answer_text(target, f"Подписка на <b>{escape(group_name)}</b> включена.")
    elif await remove_subscriber(group_name, user_id):
        await _answer_text(target, f"Подписка на <b>{escape(group_name)}</b> отключена.")
    else:
        await _answer_text(target, "Runtime-подписка не найдена.")
async def _run_summary_action(
    target: Message | CallbackQuery,
    loader: Callable[[], Awaitable[str]],
) -> None:
    """Await the summary `loader` and send its text back to the originator."""
    text = await loader()
    await _answer_text(target, text)
@router.message(Command("start"))
async def cmd_start(message: Message) -> None:
    """Greet the user and show the quick-action keyboard."""
    is_admin = _is_admin_user(_sender_id(message))
    greeting = (
        "<b>GlitchUp Bot</b>\n\nБот запущен и готов к работе.\nДля удобной навигации открой /help."
    )
    await message.answer(
        greeting,
        reply_markup=help_menu_keyboard(is_admin),
        disable_web_page_preview=True,
    )


@router.message(Command("help"))
async def cmd_help(message: Message) -> None:
    """Send the full help text plus the inline navigation keyboard."""
    is_admin = _is_admin_user(_sender_id(message))
    await message.answer(
        _help_text(is_admin),
        reply_markup=help_menu_keyboard(is_admin),
        disable_web_page_preview=True,
    )


@router.message(Command("admin"))
async def cmd_admin(message: Message) -> None:
    """Open the admin control panel (admins only)."""
    if not await _require_admin(message):
        return
    await message.answer(
        _admin_text(),
        reply_markup=admin_menu_keyboard(),
        disable_web_page_preview=True,
    )
@router.callback_query(F.data.startswith("help:"))
async def cb_help_actions(callback: CallbackQuery) -> None:
    """Route `help:*` inline-keyboard presses to the matching summary or action."""
    action = (callback.data or "").removeprefix("help:")
    await callback.answer()
    # Summary actions: dispatch table instead of an if/elif chain.
    loaders: dict[str, Callable[[], Awaitable[str]]] = {
        "week": lambda: build_digest(refresh=True),
        "today": lambda: build_today_summary(refresh=True),
        "top": lambda: build_top_issues(refresh=True),
        "stale": lambda: build_stale_issues(refresh=True),
        "releases": lambda: build_release_summary(refresh=True),
        "sync_status": build_sync_status,
    }
    loader = loaders.get(action)
    if loader is not None:
        await _run_summary_action(callback, loader)
        return
    # Subscription actions carry the group name after a second colon.
    for prefix, verb in (("sub:", "subscribe"), ("unsub:", "unsubscribe")):
        if action.startswith(prefix):
            await _handle_subscription_action(
                callback,
                action.removeprefix(prefix),
                verb,
                _callback_sender_id(callback),
            )
            return
# NOTE: registered before the generic "admin:*" handler — presumably relies on
# registration order so this exact-match filter wins for "admin:open"; confirm
# against the dispatcher's matching semantics.
@router.callback_query(F.data == "admin:open")
async def cb_admin_open(callback: CallbackQuery) -> None:
    """Open the admin panel from the help keyboard (admins only)."""
    if not await _require_admin_callback(callback):
        return
    await callback.answer()
    await callback.message.answer(
        _admin_text(),
        reply_markup=admin_menu_keyboard(),
        disable_web_page_preview=True,
    )
async def _ownership_report() -> str:
    """Render the runtime ownership state (topics, subscribers, overrides)."""
    project_overrides = await list_project_overrides()
    topic_overrides = await list_topic_overrides()
    subscriber_overrides = await list_subscriber_overrides()
    backend_subscribers = await resolve_subscribers("backend")
    frontend_subscribers = await resolve_subscribers("frontend")
    lines = [
        "<b>Ownership runtime state</b>",
        "",
        "<b>Topics:</b>",
        f"• backend: {await resolve_topic_id('backend')}",
        f"• frontend: {await resolve_topic_id('frontend')}",
        f"• digest: {await resolve_topic_id('digest')}",
        "",
        "<b>Subscribers:</b>",
        f"• backend: {', '.join(map(str, backend_subscribers)) or 'none'}",
        f"• frontend: {', '.join(map(str, frontend_subscribers)) or 'none'}",
        "",
        "<b>Project overrides:</b>",
    ]
    # NOTE(review): these f-strings mirror cmd_ownership's formatting, where
    # name and value appear concatenated with no separator — confirm intended.
    if project_overrides:
        lines.extend(
            f"{escape(record.project_slug)}{escape(record.group_name)}"
            for record in project_overrides
        )
    else:
        lines.append("• none")
    lines.extend(["", "<b>Topic overrides:</b>"])
    if topic_overrides:
        lines.extend(
            f"{escape(record.group_name)}{record.topic_id}" for record in topic_overrides
        )
    else:
        lines.append("• none")
    lines.extend(["", "<b>Subscriber overrides:</b>"])
    if subscriber_overrides:
        lines.extend(
            f"{escape(record.group_name)}{record.user_id}" for record in subscriber_overrides
        )
    else:
        lines.append("• none")
    return "\n".join(lines)


async def _mute_rules_report() -> str:
    """Render the configured mute rules, or a placeholder when none exist."""
    rules = await list_rules()
    if not rules:
        return "Mute rules не настроены."
    lines = ["<b>Mute rules</b>", ""]
    for rule in rules:
        suffix = f"{escape(rule.description)}" if rule.description else ""
        lines.append(f"• #{rule.id} <code>{escape(rule.pattern)}</code>{suffix}")
    return "\n".join(lines)


@router.callback_query(F.data.startswith("admin:"))
async def cb_admin_actions(callback: CallbackQuery) -> None:
    """Route `admin:*` keyboard presses (admins only).

    BUG FIX: the "ownership" and "mute_list" actions previously delegated to
    cmd_ownership / cmd_mute_list with `callback.message`. That message was
    sent by the bot itself, so the admin re-check inside those handlers saw
    the bot as the sender and always refused. The reports are now rendered
    directly after the callback-level admin check.
    """
    if not await _require_admin_callback(callback):
        return
    action = (callback.data or "").removeprefix("admin:")
    await callback.answer()
    if action == "sync":
        summary = await run_manual_sync()
        await callback.message.answer(
            "<b>Sync завершён</b>\n\n"
            f"• проектов: {summary.project_count}\n"
            f"• issues: {summary.issue_count}\n"
            f"• помечено resolved: {summary.resolved_count}\n"
            f"• время: {escape(summary.synced_at.isoformat())}",
            disable_web_page_preview=True,
        )
        return
    if action == "sync_status":
        await _run_summary_action(callback, build_sync_status)
        return
    if action == "ownership":
        await callback.message.answer(await _ownership_report(), disable_web_page_preview=True)
        return
    if action == "mute_list":
        await callback.message.answer(await _mute_rules_report(), disable_web_page_preview=True)
        return
    if action == "releases":
        await _run_summary_action(callback, lambda: build_release_summary(refresh=True))
        return
    if action == "today":
        await _run_summary_action(callback, lambda: build_today_summary(refresh=True))
        return
    if action == "week":
        await _run_summary_action(callback, lambda: build_digest(refresh=True))
        return
    if action == "top":
        await _run_summary_action(callback, lambda: build_top_issues(refresh=True))
        return
    if action == "stale":
        await _run_summary_action(callback, lambda: build_stale_issues(refresh=True))
        return
    if action == "guide":
        await callback.message.answer(
            "\n".join(
                [
                    "<b>Подсказка по админке</b>",
                    "",
                    "Через кнопки можно быстро смотреть состояние и запускать sync.",
                    "Изменение параметров делается командами:",
                    "• /owner slug backend",
                    "• /topic backend 123",
                    "• /mute_add payment.*timeout",
                ]
            ),
            disable_web_page_preview=True,
        )
        return
@router.message(Command("week"))
async def cmd_week(message: Message) -> None:
    """Weekly digest on demand."""
    text = await build_digest(refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("today"))
async def cmd_today(message: Message) -> None:
    """Issues seen today."""
    text = await build_today_summary(refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("project"))
async def cmd_project(message: Message) -> None:
    """Per-project summary: /project <slug>."""
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /project &lt;slug&gt;")
        return
    text = await build_project_summary(parts[1].strip(), refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("top"))
async def cmd_top(message: Message) -> None:
    """Noisiest issues."""
    text = await build_top_issues(refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("stale"))
async def cmd_stale(message: Message) -> None:
    """Long-standing unresolved issues."""
    text = await build_stale_issues(refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("releases"))
async def cmd_releases(message: Message) -> None:
    """Releases with associated issues."""
    text = await build_release_summary(refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("release"))
async def cmd_release(message: Message) -> None:
    """Detail for one release: /release <version>."""
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /release &lt;version&gt;")
        return
    text = await build_release_detail(parts[1].strip(), refresh=True)
    await message.answer(text, disable_web_page_preview=True)


@router.message(Command("sync_status"))
async def cmd_sync_status(message: Message) -> None:
    """State of the most recent synchronization."""
    await message.answer(await build_sync_status())
@router.message(Command("sync"))
async def cmd_sync(message: Message) -> None:
    """Force an immediate GlitchTip sync and report the result (admins only)."""
    if not await _require_admin(message):
        return
    summary = await run_manual_sync()
    report = (
        "<b>Sync завершён</b>\n\n"
        f"• проектов: {summary.project_count}\n"
        f"• issues: {summary.issue_count}\n"
        f"• помечено resolved: {summary.resolved_count}\n"
        f"• время: {escape(summary.synced_at.isoformat())}"
    )
    await message.answer(report)
@router.message(Command("subscribe"))
async def cmd_subscribe(message: Message) -> None:
    """Opt in to DM alerts: /subscribe <backend|frontend>."""
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /subscribe &lt;backend|frontend&gt;")
        return
    await _handle_subscription_action(
        message,
        parts[1].strip().lower(),
        "subscribe",
        _sender_id(message),
    )


@router.message(Command("unsubscribe"))
async def cmd_unsubscribe(message: Message) -> None:
    """Opt out of DM alerts: /unsubscribe <backend|frontend>."""
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /unsubscribe &lt;backend|frontend&gt;")
        return
    await _handle_subscription_action(
        message,
        parts[1].strip().lower(),
        "unsubscribe",
        _sender_id(message),
    )
@router.message(Command("ownership"))
async def cmd_ownership(message: Message) -> None:
    """Show the full runtime routing state: topics, subscribers, overrides (admins only)."""
    if not await _require_admin(message):
        return
    project_overrides = await list_project_overrides()
    topic_overrides = await list_topic_overrides()
    subscriber_overrides = await list_subscriber_overrides()
    backend_subscribers = await resolve_subscribers("backend")
    frontend_subscribers = await resolve_subscribers("frontend")
    lines = [
        "<b>Ownership runtime state</b>",
        "",
        "<b>Topics:</b>",
        f"• backend: {await resolve_topic_id('backend')}",
        f"• frontend: {await resolve_topic_id('frontend')}",
        f"• digest: {await resolve_topic_id('digest')}",
        "",
        "<b>Subscribers:</b>",
        f"• backend: {', '.join(map(str, backend_subscribers)) or 'none'}",
        f"• frontend: {', '.join(map(str, frontend_subscribers)) or 'none'}",
        "",
        "<b>Project overrides:</b>",
    ]
    # NOTE(review): the f-strings below appear to have no separator between the
    # two interpolated values, so slug/group (and name/id) render concatenated —
    # confirm intended output formatting.
    if project_overrides:
        lines.extend(
            f"{escape(record.project_slug)}{escape(record.group_name)}"
            for record in project_overrides
        )
    else:
        lines.append("• none")
    lines.extend(["", "<b>Topic overrides:</b>"])
    if topic_overrides:
        lines.extend(
            f"{escape(record.group_name)}{record.topic_id}" for record in topic_overrides
        )
    else:
        lines.append("• none")
    lines.extend(["", "<b>Subscriber overrides:</b>"])
    if subscriber_overrides:
        lines.extend(
            f"{escape(record.group_name)}{record.user_id}" for record in subscriber_overrides
        )
    else:
        lines.append("• none")
    await message.answer("\n".join(lines), disable_web_page_preview=True)
@router.message(Command("owner"))
async def cmd_owner(message: Message) -> None:
    """Bind a project to an ownership group: /owner <slug> <backend|frontend>."""
    if not await _require_admin(message):
        return
    parts = (message.text or "").split(maxsplit=2)
    if len(parts) < 3:
        await message.answer("Использование: /owner &lt;slug&gt; &lt;backend|frontend&gt;")
        return
    slug = parts[1].strip()
    group = parts[2].strip().lower()
    await set_project_group(slug, group)
    await message.answer(
        f"Проект <b>{escape(slug)}</b> привязан к группе {escape(group)}."
    )
@router.message(Command("owner_reset"))
async def cmd_owner_reset(message: Message) -> None:
    """Remove a project ownership override: /owner_reset <slug>."""
    if not await _require_admin(message):
        return
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /owner_reset &lt;slug&gt;")
        return
    if not await clear_project_group(parts[1].strip()):
        await message.answer("Override для проекта не найден.")
        return
    await message.answer("Override для проекта удалён.")
@router.message(Command("topic"))
async def cmd_topic(message: Message) -> None:
    """Set a runtime topic override: /topic <backend|frontend|digest> <topic_id>.

    Admin-only. BUG FIX: a non-numeric topic_id previously raised an
    unhandled ValueError inside the handler; the user now gets the usage
    hint instead.
    """
    if not await _require_admin(message):
        return
    args = message.text.split(maxsplit=2) if message.text else []
    if len(args) < 3:
        await message.answer(
            "Использование: /topic &lt;backend|frontend|digest&gt; &lt;topic_id&gt;"
        )
        return
    group_name = args[1].strip().lower()
    try:
        topic_id = int(args[2].strip())
    except ValueError:
        await message.answer(
            "Использование: /topic &lt;backend|frontend|digest&gt; &lt;topic_id&gt;"
        )
        return
    await set_topic_override(group_name, topic_id)
    await message.answer(f"Topic override для <b>{escape(group_name)}</b> сохранён: {topic_id}.")
@router.message(Command("topic_reset"))
async def cmd_topic_reset(message: Message) -> None:
    """Remove a runtime topic override: /topic_reset <backend|frontend|digest>."""
    if not await _require_admin(message):
        return
    parts = (message.text or "").split(maxsplit=1)
    if len(parts) < 2:
        await message.answer("Использование: /topic_reset &lt;backend|frontend|digest&gt;")
        return
    if not await clear_topic_override(parts[1].strip().lower()):
        await message.answer("Topic override не найден.")
        return
    await message.answer("Topic override удалён.")
@router.message(Command("mute_add"))
async def cmd_mute_add(message: Message) -> None:
    """Add a mute rule by regex: /mute_add <regex>. Admin-only.

    Validates the pattern up front so a broken regex is rejected with a
    message instead of being persisted and failing later during matching
    (the help text and examples document mute rules as regexes).
    """
    import re

    if not await _require_admin(message):
        return
    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:
        await message.answer("Использование: /mute_add &lt;regex&gt;")
        return
    pattern = args[1].strip()
    try:
        re.compile(pattern)
    except re.error as exc:
        await message.answer(f"Некорректный regex: <code>{escape(str(exc))}</code>")
        return
    rule = await add_rule(pattern)
    await message.answer(f"Добавлено mute rule #{rule.id}: <code>{escape(rule.pattern)}</code>")
@router.message(Command("mute_list"))
async def cmd_mute_list(message: Message) -> None:
    """List all stored mute rules with their ids (admins only)."""
    if not await _require_admin(message):
        return
    rules = await list_rules()
    if not rules:
        await message.answer("Mute rules не настроены.")
        return
    lines = ["<b>Mute rules</b>", ""]
    for rule in rules:
        # NOTE(review): no separator before the description — it renders glued
        # to the closing </code> tag; confirm intended formatting.
        suffix = f"{escape(rule.description)}" if rule.description else ""
        lines.append(f"• #{rule.id} <code>{escape(rule.pattern)}</code>{suffix}")
    await message.answer("\n".join(lines), disable_web_page_preview=True)
@router.message(Command("mute_del"))
async def cmd_mute_del(message: Message) -> None:
    """Delete a mute rule by id: /mute_del <id>. Admin-only.

    BUG FIX: a non-numeric id previously raised an unhandled ValueError
    inside the handler; the user now gets the usage hint instead.
    """
    if not await _require_admin(message):
        return
    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:
        await message.answer("Использование: /mute_del &lt;id&gt;")
        return
    try:
        rule_id = int(args[1].strip())
    except ValueError:
        await message.answer("Использование: /mute_del &lt;id&gt;")
        return
    if not await remove_rule(rule_id):
        await message.answer("Mute rule не найдено.")
        return
    await message.answer("Mute rule удалено.")

View File

@@ -0,0 +1,36 @@
from aiogram.types import InlineKeyboardMarkup
from aiogram.utils.keyboard import InlineKeyboardBuilder
def help_menu_keyboard(is_admin: bool) -> InlineKeyboardMarkup:
    """Two-column inline keyboard for /help; admins get an extra panel button."""
    buttons = [
        ("Сводка за неделю", "help:week"),
        ("Сегодня", "help:today"),
        ("Топ issues", "help:top"),
        ("Старые issues", "help:stale"),
        ("Релизы", "help:releases"),
        ("Статус sync", "help:sync_status"),
        ("Подписка backend", "help:sub:backend"),
        ("Подписка frontend", "help:sub:frontend"),
        ("Отписка backend", "help:unsub:backend"),
        ("Отписка frontend", "help:unsub:frontend"),
    ]
    if is_admin:
        buttons.append(("Админ-панель", "admin:open"))
    builder = InlineKeyboardBuilder()
    for text, data in buttons:
        builder.button(text=text, callback_data=data)
    builder.adjust(2)
    return builder.as_markup()


def admin_menu_keyboard() -> InlineKeyboardMarkup:
    """Two-column inline keyboard for the /admin control panel."""
    buttons = [
        ("Запустить sync", "admin:sync"),
        ("Статус sync", "admin:sync_status"),
        ("Ownership", "admin:ownership"),
        ("Mute rules", "admin:mute_list"),
        ("Релизы", "admin:releases"),
        ("Today", "admin:today"),
        ("Week digest", "admin:week"),
        ("Топ issues", "admin:top"),
        ("Старые issues", "admin:stale"),
        ("Инструкция", "admin:guide"),
    ]
    builder = InlineKeyboardBuilder()
    for text, data in buttons:
        builder.button(text=text, callback_data=data)
    builder.adjust(2)
    return builder.as_markup()

115
src/glitchup_bot/config.py Normal file
View File

@@ -0,0 +1,115 @@
from functools import lru_cache
from typing import Any
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
    """Application configuration read from environment variables / .env.

    Comma-separated env values are decoded by the before-validators below;
    model_config sets enable_decoding=False so pydantic-settings hands the
    raw string through instead of attempting JSON decoding first.
    """

    # --- Telegram routing targets ---
    telegram_bot_token: str
    telegram_group_chat_id: int
    telegram_backend_topic_id: int
    telegram_frontend_topic_id: int
    telegram_digest_topic_id: int

    # --- Project/group membership (comma-separated in the environment) ---
    backend_projects: list[str] = Field(default_factory=list)
    frontend_projects: list[str] = Field(default_factory=list)
    backend_subscribers: list[int] = Field(default_factory=list)
    frontend_subscribers: list[int] = Field(default_factory=list)
    telegram_admin_ids: list[int] = Field(default_factory=list)

    # --- GlitchTip API access ---
    glitchtip_url: str
    glitchtip_api_token: str
    glitchtip_org_slug: str

    database_url: str
    api_port: int = 8080
    webhook_secret: str = ""

    # --- Weekly digest schedule ---
    digest_cron_day: str = "mon"
    digest_cron_hour: int = 10
    digest_cron_minute: int = 0
    digest_timezone: str = "Asia/Krasnoyarsk"
    sync_interval_minutes: int = 30

    # --- Alerting knobs ---
    alert_environments: list[str] = Field(default_factory=lambda: ["production"])
    dedup_window_hours: int = 6
    alert_rate_limit_count: int = 10
    alert_rate_limit_window_minutes: int = 15

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        enable_decoding=False,
    )

    @field_validator("backend_projects", "frontend_projects", "alert_environments", mode="before")
    @classmethod
    def split_comma_str(cls, value: str | list[str]) -> list[str]:
        """Decode "a,b,c" env strings into a list of stripped, non-empty items."""
        if isinstance(value, str):
            return [item.strip() for item in value.split(",") if item.strip()]
        return value

    @field_validator(
        "backend_subscribers",
        "frontend_subscribers",
        "telegram_admin_ids",
        mode="before",
    )
    @classmethod
    def split_comma_int(cls, value: str | list[int]) -> list[int]:
        """Decode "1,2,3" env strings into ints; non-numeric items raise ValueError."""
        if isinstance(value, str):
            return [int(item.strip()) for item in value.split(",") if item.strip()]
        return value

    def get_environment(self, project_slug: str) -> str | None:
        # Assumes slugs follow "<name>-<environment>" — TODO confirm naming scheme.
        # Slugs without a "-" yield None.
        parts = project_slug.rsplit("-", 1)
        return parts[-1] if len(parts) == 2 else None

    def get_group(self, project_slug: str | None) -> str:
        """Static routing: "frontend" when listed in frontend_projects, else "backend"."""
        if project_slug in self.frontend_projects:
            return "frontend"
        return "backend"

    def get_topic_id(self, group: str) -> int:
        """Forum-topic id for a group; unknown groups fall back to the backend topic."""
        if group == "digest":
            return self.telegram_digest_topic_id
        if group == "frontend":
            return self.telegram_frontend_topic_id
        return self.telegram_backend_topic_id

    def get_subscribers(self, group: str) -> list[int]:
        """Configured subscriber ids; any non-"frontend" group gets backend_subscribers."""
        if group == "frontend":
            return self.frontend_subscribers
        return self.backend_subscribers

    def is_alert_environment(self, project_slug: str) -> bool:
        """True when the slug's environment suffix is one of alert_environments."""
        environment = self.get_environment(project_slug)
        return environment in self.alert_environments if environment else False

    def is_admin(self, user_id: int | None) -> bool:
        """Admin check; NOTE: an empty admin list means every user is treated as admin."""
        if user_id is None:
            return False
        if not self.telegram_admin_ids:
            return True
        return user_id in self.telegram_admin_ids
@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Cached singleton accessor for Settings.

    lru_cache exposes cache_clear(), which clear_settings_cache() relies on,
    so the decorator is part of this function's interface.
    """
    return Settings()
def clear_settings_cache() -> None:
    """Drop the cached Settings so the next get_settings() re-reads the environment."""
    get_settings.cache_clear()
class SettingsProxy:
    """Lazy facade: every attribute read is forwarded to the cached Settings.

    Lets modules hold a stable `settings` object while still honoring
    clear_settings_cache() between reads.
    """

    def __getattr__(self, name: str) -> Any:
        current = get_settings()
        return getattr(current, name)


settings = SettingsProxy()

View File

@@ -0,0 +1,106 @@
import logging
import httpx
from glitchup_bot.config import settings
logger = logging.getLogger(__name__)
class GlitchTipClient:
    """Minimal async client for the GlitchTip (Sentry-compatible) REST API.

    A single httpx.AsyncClient is created lazily and reused so connections
    are pooled; call close() during shutdown.
    """

    def __init__(self) -> None:
        self.base_url = settings.glitchtip_url.rstrip("/")
        self.headers = {"Authorization": f"Bearer {settings.glitchtip_api_token}"}
        self._client: httpx.AsyncClient | None = None

    async def _get_client(self) -> httpx.AsyncClient:
        """Create the shared AsyncClient on first use."""
        if self._client is None:
            self._client = httpx.AsyncClient(
                base_url=self.base_url,
                headers=self.headers,
                timeout=30,
            )
        return self._client

    async def _get(self, path: str, params: dict | None = None) -> list | dict:
        """GET a single resource; raises httpx.HTTPStatusError on 4xx/5xx."""
        client = await self._get_client()
        response = await client.get(path, params=params)
        response.raise_for_status()
        return response.json()

    async def _get_paginated(self, path: str, params: dict | None = None) -> list:
        """GET all pages by following the Link header's rel="next" cursor.

        Fix: copy the caller's params dict before setdefault("limit") — the
        previous code mutated the dict the caller passed in.
        """
        results: list = []
        base_params = dict(params) if params else {}
        base_params.setdefault("limit", 100)
        cursor: str | None = None
        client = await self._get_client()
        while True:
            request_params = dict(base_params)
            if cursor:
                request_params["cursor"] = cursor
            response = await client.get(path, params=request_params)
            response.raise_for_status()
            data = response.json()
            results.extend(data)
            next_cursor = self._parse_next_cursor(response.headers.get("link", ""))
            # Stop when the server signals no further page or returned nothing.
            if not next_cursor or not data:
                break
            cursor = next_cursor
        return results

    @staticmethod
    def _parse_next_cursor(link_header: str) -> str | None:
        """Extract the next-page cursor from a Sentry-style Link header.

        Scans the comma-separated parts for one carrying rel="next" and
        results="true", then slices the value after the first "cursor="
        occurrence (normally inside the <...> URL) up to the closing ">".
        """
        for part in link_header.split(","):
            if 'rel="next"' not in part or 'results="true"' not in part or "cursor=" not in part:
                continue
            start = part.index("cursor=") + len("cursor=")
            end = part.find(">", start)
            return part[start:end] if end != -1 else part[start:]
        return None

    async def list_projects(self) -> list[dict]:
        """All projects in the configured organization."""
        return await self._get_paginated(
            f"/api/0/organizations/{settings.glitchtip_org_slug}/projects/"
        )

    async def list_issues(
        self, project_slug: str, query: str = "is:unresolved", sort: str = "date"
    ) -> list[dict]:
        """Issues for one project, filtered/sorted via API query parameters."""
        return await self._get_paginated(
            f"/api/0/projects/{settings.glitchtip_org_slug}/{project_slug}/issues/",
            params={"query": query, "sort": sort},
        )

    async def get_issue(self, issue_id: int) -> dict:
        """Fetch a single issue by id."""
        return await self._get(f"/api/0/issues/{issue_id}/")

    async def close(self) -> None:
        """Close the underlying AsyncClient; safe to call repeatedly."""
        if self._client is not None:
            await self._client.aclose()
            self._client = None
glitchtip_client: GlitchTipClient | None = None


def get_glitchtip_client() -> GlitchTipClient:
    """Return the process-wide client, creating it on first use."""
    global glitchtip_client
    client = glitchtip_client
    if client is None:
        client = GlitchTipClient()
        glitchtip_client = client
    return client
async def close_glitchtip_client() -> None:
    """Close the shared client (if any) and reset the module singleton."""
    global glitchtip_client
    if glitchtip_client is None:
        return
    await glitchtip_client.close()
    glitchtip_client = None

64
src/glitchup_bot/main.py Normal file
View File

@@ -0,0 +1,64 @@
import asyncio
import logging
import uvicorn
from glitchup_bot.api.app import app
from glitchup_bot.bot.bot import close_bot, get_bot, get_dispatcher
from glitchup_bot.config import settings
from glitchup_bot.glitchtip_client.client import close_glitchtip_client
from glitchup_bot.models.database import dispose_engine
from glitchup_bot.tasks.scheduler import setup_scheduler, shutdown_scheduler
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
)
logger = logging.getLogger(__name__)
async def start_api() -> None:
    """Run the FastAPI app via uvicorn until the server stops."""
    server = uvicorn.Server(
        uvicorn.Config(app, host="0.0.0.0", port=settings.api_port, log_level="info")
    )
    await server.serve()
async def start_bot() -> None:
    """Run aiogram long polling until cancelled."""
    logger.info("Starting Telegram bot polling")
    dispatcher = get_dispatcher()
    await dispatcher.start_polling(get_bot())
async def shutdown_resources() -> None:
    """Tear down shared resources.

    Order is intentional: stop the scheduler first (no new jobs), then the
    GlitchTip HTTP client, then the bot session, and finally the DB engine.
    """
    await shutdown_scheduler()
    await close_glitchtip_client()
    await close_bot()
    await dispose_engine()
async def main() -> None:
    """Run the HTTP API and the Telegram bot concurrently.

    When either task finishes (or fails) first, the other is cancelled and
    awaited, then the first finisher's result() is re-raised if it errored.
    Shared resources are always torn down in the finally block.
    """
    logger.info("GlitchUp Bot starting")
    setup_scheduler()
    api_task = asyncio.create_task(start_api(), name="api")
    bot_task = asyncio.create_task(start_bot(), name="bot")
    try:
        done, pending = await asyncio.wait(
            {api_task, bot_task},
            return_when=asyncio.FIRST_COMPLETED,
        )
        # Cancel the survivor and swallow its CancelledError.
        for task in pending:
            task.cancel()
        await asyncio.gather(*pending, return_exceptions=True)
        # Propagate any exception from the task that completed first.
        for task in done:
            task.result()
    finally:
        await shutdown_resources()
def run() -> None:
    """Synchronous entry point: drive the async main loop to completion."""
    asyncio.run(main())


if __name__ == "__main__":
    run()

View File

@@ -0,0 +1,21 @@
from glitchup_bot.models.base import Base
from glitchup_bot.models.issues import IssueCache
from glitchup_bot.models.mute_rules import MuteRule
from glitchup_bot.models.notifications import NotificationSent
from glitchup_bot.models.ownership import (
GroupSubscriberOverride,
GroupTopicOverride,
ProjectOwnershipOverride,
)
from glitchup_bot.models.sync import SyncState
__all__ = [
"Base",
"GroupSubscriberOverride",
"GroupTopicOverride",
"IssueCache",
"MuteRule",
"NotificationSent",
"ProjectOwnershipOverride",
"SyncState",
]

View File

@@ -0,0 +1,5 @@
from sqlalchemy.orm import DeclarativeBase
class Base(DeclarativeBase):
    """Shared SQLAlchemy declarative base for all ORM models."""

    pass

View File

@@ -0,0 +1,48 @@
from collections.abc import AsyncIterator
from sqlalchemy.ext.asyncio import (
AsyncEngine,
AsyncSession,
async_sessionmaker,
create_async_engine,
)
from glitchup_bot.config import settings
engine: AsyncEngine | None = None
session_factory: async_sessionmaker[AsyncSession] | None = None
def get_engine() -> AsyncEngine:
    """Return the process-wide async engine, creating it lazily."""
    global engine
    if engine is not None:
        return engine
    engine = create_async_engine(settings.database_url, echo=False)
    return engine
def get_session_factory() -> async_sessionmaker[AsyncSession]:
    """Return the lazily-created session factory bound to the shared engine."""
    global session_factory
    if session_factory is not None:
        return session_factory
    session_factory = async_sessionmaker(
        get_engine(), class_=AsyncSession, expire_on_commit=False
    )
    return session_factory
async def get_session() -> AsyncIterator[AsyncSession]:
    """Yield one AsyncSession per call; the context manager closes it afterwards.

    Generator shape suits framework dependency injection (e.g. FastAPI Depends)
    — presumably how it is consumed; verify against the API layer.
    """
    async with get_session_factory()() as session:
        yield session
async def dispose_engine() -> None:
    """Dispose the engine's connection pool and reset the cached globals."""
    global engine, session_factory
    if engine is not None:
        await engine.dispose()
    engine = None
    session_factory = None

View File

@@ -0,0 +1,29 @@
from datetime import datetime
from sqlalchemy import BigInteger, DateTime, Integer, String, Text, func
from sqlalchemy.orm import Mapped, mapped_column
from glitchup_bot.models.base import Base
class IssueCache(Base):
    """Local snapshot of a GlitchTip issue, refreshed by the sync job."""

    __tablename__ = "issues_cache"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True, autoincrement=True)
    # GlitchTip's own issue id; one cache row per upstream issue.
    glitchtip_issue_id: Mapped[int] = mapped_column(BigInteger, unique=True, index=True)
    project_slug: Mapped[str] = mapped_column(String(255), index=True)
    title: Mapped[str] = mapped_column(Text)
    culprit: Mapped[str | None] = mapped_column(Text, nullable=True)
    level: Mapped[str] = mapped_column(String(50), default="error")
    status: Mapped[str] = mapped_column(String(50), default="unresolved")
    first_seen: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    last_seen: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    event_count: Mapped[int] = mapped_column(Integer, default=0)
    is_regression: Mapped[bool] = mapped_column(default=False)
    # Direct URL to the issue in the GlitchTip UI, when known.
    link: Mapped[str | None] = mapped_column(Text, nullable=True)
    release: Mapped[str | None] = mapped_column(String(255), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )

View File

@@ -0,0 +1,16 @@
from datetime import datetime
from sqlalchemy import Boolean, DateTime, Integer, Text, func
from sqlalchemy.orm import Mapped, mapped_column
from glitchup_bot.models.base import Base
class MuteRule(Base):
    """Regex-based suppression rule applied to incoming alerts."""

    __tablename__ = "mute_rules"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Regex matched case-insensitively against alert text (see services.mute_rules).
    pattern: Mapped[str] = mapped_column(Text, unique=True)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Inactive rules are kept for history but skipped during matching.
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

@@ -0,0 +1,20 @@
from datetime import datetime
from sqlalchemy import BigInteger, DateTime, String, func
from sqlalchemy.orm import Mapped, mapped_column
from glitchup_bot.models.base import Base
class NotificationSent(Base):
    """Audit log of every notification decision (sent, muted, rate-limited)."""

    __tablename__ = "notifications_sent"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True, autoincrement=True)
    # 0 when the upstream issue id is unknown (webhook-driven alerts record 0).
    issue_id: Mapped[int] = mapped_column(BigInteger, index=True)
    notification_type: Mapped[str] = mapped_column(String(50))  # "alert", "digest", "uptime"
    # Dedup key, e.g. "<project>:<title>" or "uptime:<title>".
    fingerprint: Mapped[str] = mapped_column(String(255), index=True)
    project_slug: Mapped[str | None] = mapped_column(String(255), nullable=True)
    group_name: Mapped[str | None] = mapped_column(String(50), nullable=True, index=True)
    priority: Mapped[str | None] = mapped_column(String(20), nullable=True)
    # "sent", "muted", or "rate_limited" (set by the webhook handler).
    delivery_status: Mapped[str] = mapped_column(String(50), default="sent")
    sent_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

@@ -0,0 +1,40 @@
from datetime import datetime
from sqlalchemy import BigInteger, DateTime, Integer, String, UniqueConstraint, func
from sqlalchemy.orm import Mapped, mapped_column
from glitchup_bot.models.base import Base
class ProjectOwnershipOverride(Base):
    """Admin-set mapping of a project slug to a group, overriding .env routing."""

    __tablename__ = "project_ownership_overrides"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    project_slug: Mapped[str] = mapped_column(String(255), unique=True, index=True)
    # "backend" or "frontend" (validated in services.ownership).
    group_name: Mapped[str] = mapped_column(String(50))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )
class GroupTopicOverride(Base):
    """Admin-set override of a group's Telegram forum-topic id."""

    __tablename__ = "group_topic_overrides"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # "backend", "frontend", or "digest" (validated in services.ownership).
    group_name: Mapped[str] = mapped_column(String(50), unique=True, index=True)
    topic_id: Mapped[int] = mapped_column(Integer)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )
class GroupSubscriberOverride(Base):
    """Extra subscriber (Telegram user id) added to a group at runtime."""

    __tablename__ = "group_subscriber_overrides"
    # One row per (group, user) pair.
    __table_args__ = (UniqueConstraint("group_name", "user_id", name="uq_group_subscriber"),)

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    group_name: Mapped[str] = mapped_column(String(50), index=True)
    user_id: Mapped[int] = mapped_column(BigInteger, index=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

View File

@@ -0,0 +1,19 @@
from datetime import datetime
from sqlalchemy import BigInteger, DateTime, String, func
from sqlalchemy.orm import Mapped, mapped_column
from glitchup_bot.models.base import Base
class SyncState(Base):
    """Timestamp of the last successful run, keyed by sync source."""

    __tablename__ = "sync_state"

    id: Mapped[int] = mapped_column(BigInteger, primary_key=True, autoincrement=True)
    source: Mapped[str] = mapped_column(String(100), unique=True)  # "api_sync", "webhook"
    last_successful_at: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), nullable=True
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now(), onupdate=func.now()
    )

View File

View File

@@ -0,0 +1,197 @@
import logging
from datetime import UTC, datetime, timedelta
from sqlalchemy import func, select
from glitchup_bot.api.schemas import GlitchTipWebhookPayload, WebhookAttachment
from glitchup_bot.config import settings
from glitchup_bot.models.database import get_session_factory
from glitchup_bot.models.notifications import NotificationSent
from glitchup_bot.services.mute_rules import find_matching_rule
from glitchup_bot.services.routing import resolve_group
from glitchup_bot.services.sync_service import mark_sync_success
from glitchup_bot.services.telegram_sender import send_alert
logger = logging.getLogger(__name__)
def _extract_field(attachment: WebhookAttachment, field_name: str) -> str | None:
    """Value of the first attachment field whose title matches (case-insensitive)."""
    wanted = field_name.lower()
    for field in attachment.fields or []:
        if field.title.lower() == wanted:
            return field.value
    return None
def _extract_project_slug(attachment: WebhookAttachment) -> str | None:
    """Project slug from the attachment's "project" field, if present."""
    return _extract_field(attachment, "project")


def _extract_release_name(attachment: WebhookAttachment) -> str | None:
    """Release name from the attachment's "release" field, if present."""
    return _extract_field(attachment, "release")


def _build_fingerprint(attachment: WebhookAttachment) -> str:
    """Dedup key "<project-slug>:<title>"; a missing slug becomes the string "None"."""
    return f"{_extract_project_slug(attachment)}:{attachment.title}"
async def _is_duplicate(fingerprint: str) -> bool:
    """True if an alert with this fingerprint was delivered within the dedup window.

    Fix: uses limit(1)/first() instead of scalar_one_or_none(), which raised
    MultipleResultsFound whenever two "sent" rows for the same fingerprint
    landed inside the window (possible after races or a window change).
    """
    cutoff = datetime.now(UTC) - timedelta(hours=settings.dedup_window_hours)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(NotificationSent.id)
            .where(
                NotificationSent.fingerprint == fingerprint,
                NotificationSent.notification_type == "alert",
                NotificationSent.delivery_status == "sent",
                NotificationSent.sent_at >= cutoff,
            )
            .limit(1)
        )
        return result.first() is not None
async def _is_rate_limited(group_name: str, priority: str) -> bool:
    """True when the group already hit the per-priority alert quota.

    P1 alerts are never rate-limited; only successfully "sent" alerts in the
    configured window count toward the limit.
    """
    if priority == "P1":
        return False
    cutoff = datetime.now(UTC) - timedelta(minutes=settings.alert_rate_limit_window_minutes)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(func.count(NotificationSent.id)).where(
                NotificationSent.notification_type == "alert",
                NotificationSent.delivery_status == "sent",
                NotificationSent.group_name == group_name,
                NotificationSent.priority == priority,
                NotificationSent.sent_at >= cutoff,
            )
        )
        count = result.scalar_one()
    return count >= settings.alert_rate_limit_count
async def _record_notification(
    issue_id: int,
    fingerprint: str,
    *,
    project_slug: str | None,
    group_name: str | None,
    priority: str | None,
    delivery_status: str,
    notification_type: str = "alert",
) -> None:
    """Persist one NotificationSent audit row for this delivery decision."""
    record = NotificationSent(
        issue_id=issue_id,
        notification_type=notification_type,
        fingerprint=fingerprint,
        project_slug=project_slug,
        group_name=group_name,
        priority=priority,
        delivery_status=delivery_status,
    )
    async with get_session_factory()() as session:
        session.add(record)
        await session.commit()
def _determine_priority(attachment: WebhookAttachment, project_slug: str | None) -> str:
    """Map attachment color + environment to P1/P2/P3.

    Non-production environments are always P3. In production, the attachment
    color "#e52b50" escalates to P1 — presumably GlitchTip's error color;
    confirm against GlitchTip webhook payloads.
    """
    environment = settings.get_environment(project_slug) if project_slug else None
    if environment != "production":
        return "P3"
    return "P1" if attachment.color == "#e52b50" else "P2"
async def process_webhook_payload(raw_payload: dict) -> None:
    """Handle one GlitchTip webhook delivery end-to-end.

    Pipeline per attachment: environment filter -> dedup -> priority filter
    (P3 dropped) -> mute rules -> rate limit -> send + audit record.
    Uptime alerts bypass all filters and always go out as P1.
    """
    payload = GlitchTipWebhookPayload(**raw_payload)
    # Any webhook delivery counts as a heartbeat for the "webhook" source.
    await mark_sync_success("webhook")
    if payload.is_uptime_alert():
        for attachment in payload.attachments:
            group_name = await send_alert(
                attachment,
                project_slug=None,
                priority="P1",
                is_uptime=True,
            )
            await _record_notification(
                issue_id=0,  # upstream issue id unknown for uptime alerts
                fingerprint=f"uptime:{attachment.title}",
                project_slug=None,
                group_name=group_name,
                priority="P1",
                delivery_status="sent",
                notification_type="uptime",
            )
        return
    for attachment in payload.attachments:
        project_slug = _extract_project_slug(attachment)
        if not project_slug:
            logger.warning(
                "Skipping webhook attachment without project field: %s", attachment.title
            )
            continue
        # Only configured environments (default: production) produce alerts.
        if not settings.is_alert_environment(project_slug):
            logger.debug("Skipping non-alert environment: %s", project_slug)
            continue
        fingerprint = _build_fingerprint(attachment)
        if await _is_duplicate(fingerprint):
            logger.debug("Skipping duplicate alert for %s", fingerprint)
            continue
        priority = _determine_priority(attachment, project_slug)
        # P3 is intentionally silent (not even audited).
        if priority == "P3":
            logger.debug("Skipping P3 alert for %s", attachment.title)
            continue
        group_name = await resolve_group(project_slug)
        muted_by = await find_matching_rule(attachment, project_slug)
        if muted_by is not None:
            logger.info("Muted alert %s by rule %s", fingerprint, muted_by.pattern)
            await _record_notification(
                issue_id=0,
                fingerprint=fingerprint,
                project_slug=project_slug,
                group_name=group_name,
                priority=priority,
                delivery_status="muted",
            )
            continue
        if await _is_rate_limited(group_name, priority):
            logger.info("Rate-limited alert %s for group %s", fingerprint, group_name)
            await _record_notification(
                issue_id=0,
                fingerprint=fingerprint,
                project_slug=project_slug,
                group_name=group_name,
                priority=priority,
                delivery_status="rate_limited",
            )
            continue
        # send_alert returns the group it actually delivered to; record that.
        group_name = await send_alert(
            attachment,
            project_slug,
            priority,
            release_name=_extract_release_name(attachment),
        )
        await _record_notification(
            issue_id=0,
            fingerprint=fingerprint,
            project_slug=project_slug,
            group_name=group_name,
            priority=priority,
            delivery_status="sent",
        )

View File

@@ -0,0 +1,263 @@
from collections import defaultdict
from datetime import UTC, datetime, timedelta
from html import escape
from glitchup_bot.services.sync_service import (
IssueSnapshot,
SyncSummary,
get_last_sync_state,
load_issue_snapshots,
sync_issues,
)
def _issue_label(issue: IssueSnapshot) -> str:
    """HTML-escaped "title (project)" label, hyperlinked when the issue has a URL."""
    title = escape(issue.title)
    project = escape(issue.project_slug)
    if not issue.link:
        return f"{title} ({project})"
    href = escape(issue.link, quote=True)
    return f'<a href="{href}">{title}</a> ({project})'
async def _load_issues(
    project_slugs: list[str] | None = None,
    *,
    refresh: bool = True,
    unresolved_only: bool = True,
) -> list[IssueSnapshot]:
    """Thin passthrough to load_issue_snapshots so all builders share one loader."""
    return await load_issue_snapshots(
        project_slugs,
        refresh=refresh,
        unresolved_only=unresolved_only,
    )
async def build_digest(refresh: bool = True) -> str:
    """Build the weekly HTML digest: totals, per-project stats, releases,
    noisiest issues, and stale tails over unresolved issues.

    Fix: the "all clean" fallback previously compared len(lines) against the
    magic number 7 (the header size); it now records the header length, so
    editing the header cannot silently break the empty-digest message.
    """
    now = datetime.now(UTC)
    week_ago = now - timedelta(days=7)
    issues = await _load_issues(refresh=refresh)
    new_issues: list[IssueSnapshot] = []
    regressions: list[IssueSnapshot] = []
    stale: list[IssueSnapshot] = []
    by_release: dict[str, list[IssueSnapshot]] = defaultdict(list)
    project_stats: dict[str, dict[str, int]] = defaultdict(
        lambda: {"new": 0, "regression": 0, "events": 0}
    )
    top_noisy = sorted(issues, key=lambda item: item.event_count, reverse=True)
    for issue in issues:
        if issue.first_seen and issue.first_seen >= week_ago:
            new_issues.append(issue)
            project_stats[issue.project_slug]["new"] += 1
        if issue.is_regression:
            regressions.append(issue)
            project_stats[issue.project_slug]["regression"] += 1
        if issue.first_seen and issue.first_seen < week_ago:
            stale.append(issue)
        if issue.release:
            by_release[issue.release].append(issue)
        project_stats[issue.project_slug]["events"] += issue.event_count
    lines = [
        "<b>📊 GlitchTip digest за неделю</b>",
        "",
        "<b>Всего:</b>",
        f"• новых issues: {len(new_issues)}",
        f"• regressions: {len(regressions)}",
        f"• unresolved > 7 дней: {len(stale)}",
        "",
    ]
    header_len = len(lines)  # baseline for the "all clean" check below
    if project_stats:
        lines.append("<b>По проектам:</b>")
        for slug, stats in sorted(
            project_stats.items(),
            key=lambda item: (item[1]["new"], item[1]["regression"], item[0]),
            reverse=True,
        )[:5]:
            parts = [f"{stats['new']} новых"]
            if stats["regression"]:
                parts.append(f"{stats['regression']} regression")
            lines.append(f"• <b>{escape(slug)}</b> — {', '.join(parts)}")
        lines.append("")
    if by_release:
        lines.append("<b>После релизов:</b>")
        for release_name, release_issues in sorted(
            by_release.items(),
            key=lambda item: (len(item[1]), sum(issue.event_count for issue in item[1])),
            reverse=True,
        )[:5]:
            lines.append(
                f"• <b>{escape(release_name)}</b> — "
                f"{len(release_issues)} issues, "
                f"{sum(issue.event_count for issue in release_issues)} событий"
            )
        lines.append("")
    if top_noisy:
        lines.append("<b>Топ шумных:</b>")
        for issue in top_noisy[:5]:
            lines.append(f"• {_issue_label(issue)} — {issue.event_count} событий")
        lines.append("")
    if stale:
        lines.append("<b>Хвосты:</b>")
        for issue in sorted(
            stale,
            key=lambda item: (now - (item.first_seen or now)).days,
            reverse=True,
        )[:5]:
            age = (now - (issue.first_seen or now)).days
            lines.append(f"• {_issue_label(issue)} — {age} дн. без разбора")
    if len(lines) == header_len:
        lines.append("Все чисто! Новых проблем нет.")
    return "\n".join(lines)
async def build_today_summary(refresh: bool = True) -> str:
    """List issues first seen since midnight UTC, newest first, max 10."""
    now = datetime.now(UTC)
    start_of_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
    issues = await _load_issues(refresh=refresh)
    today_issues = [
        item for item in issues if item.first_seen and item.first_seen >= start_of_day
    ]
    if not today_issues:
        return "За сегодня новых issues не обнаружено."
    today_issues.sort(key=lambda item: item.first_seen or now, reverse=True)
    lines = [f"<b>📋 Сегодня: {len(today_issues)} новых issues</b>", ""]
    for item in today_issues[:10]:
        lines.append(f"• {_issue_label(item)} — {item.event_count} событий")
    return "\n".join(lines)
async def build_project_summary(project_slug: str, refresh: bool = True) -> str:
    """Per-project card: totals plus the five most recently seen unresolved issues."""
    snapshot = await _load_issues([project_slug], refresh=refresh)
    if not snapshot:
        return f"<b>{escape(project_slug)}</b>: нет unresolved issues."
    event_total = sum(item.event_count for item in snapshot)
    regression_count = sum(1 for item in snapshot if item.is_regression)
    lines = [
        f"<b>📦 {escape(project_slug)}</b>",
        "",
        f"• unresolved issues: {len(snapshot)}",
        f"• всего событий: {event_total}",
        f"• regressions: {regression_count}",
        "",
        "<b>Последние:</b>",
    ]
    fallback = datetime.min.replace(tzinfo=UTC)
    recent = sorted(snapshot, key=lambda item: item.last_seen or fallback, reverse=True)
    for item in recent[:5]:
        lines.append(f"• {_issue_label(item)} — {item.event_count} событий")
    return "\n".join(lines)
async def build_top_issues(limit: int = 10, refresh: bool = True) -> str:
    """Unresolved issues ranked by event count, noisiest first."""
    issues = await _load_issues(refresh=refresh)
    if not issues:
        return "Нет unresolved issues."
    ranked = sorted(issues, key=lambda item: item.event_count, reverse=True)
    lines = ["<b>🔊 Топ шумных issues</b>", ""]
    for item in ranked[:limit]:
        lines.append(f"• <b>{item.event_count}</b> событий — {_issue_label(item)}")
    return "\n".join(lines)
async def build_stale_issues(
    min_days: int = 7,
    limit: int = 10,
    refresh: bool = True,
) -> str:
    """Unresolved issues first seen at least min_days ago, oldest first.

    Fix: the empty-result message previously hard-coded "7 дней" regardless
    of the min_days argument (and said ">" where the filter is ">=");
    it now reflects the actual threshold.
    """
    now = datetime.now(UTC)
    issues = await _load_issues(refresh=refresh)
    stale = [
        issue for issue in issues if issue.first_seen and (now - issue.first_seen).days >= min_days
    ]
    if not stale:
        return f"Нет старых незакрытых issues (≥ {min_days} дней)."
    lines = ["<b>🕸 Старые незакрытые issues</b>", ""]
    for issue in sorted(
        stale,
        key=lambda item: now - (item.first_seen or now),
        reverse=True,
    )[:limit]:
        age = (now - (issue.first_seen or now)).days
        lines.append(f"• <b>{age} дн.</b> — {_issue_label(issue)} ({issue.event_count} событий)")
    return "\n".join(lines)
async def build_release_summary(limit: int = 10, refresh: bool = True) -> str:
    """Releases ranked by unresolved-issue count, then by total event volume."""
    grouped: dict[str, list[IssueSnapshot]] = defaultdict(list)
    for item in await _load_issues(refresh=refresh):
        if item.release:
            grouped[item.release].append(item)
    if not grouped:
        return "Релизы в данных GlitchTip не обнаружены."

    def rank(entry: tuple[str, list[IssueSnapshot]]) -> tuple[int, int]:
        # Sort key: (issue count, total events) per release.
        _, bucket = entry
        return (len(bucket), sum(item.event_count for item in bucket))

    lines = ["<b>🚀 Релизы с незакрытыми issue</b>", ""]
    for release_name, bucket in sorted(grouped.items(), key=rank, reverse=True)[:limit]:
        lines.append(
            f"• <b>{escape(release_name)}</b> — {len(bucket)} issues, "
            f"{sum(item.event_count for item in bucket)} событий"
        )
    return "\n".join(lines)
async def build_release_detail(release_name: str, refresh: bool = True) -> str:
    """Top-10 noisiest unresolved issues attributed to one release."""
    all_issues = await _load_issues(refresh=refresh)
    matched = [item for item in all_issues if item.release == release_name]
    if not matched:
        return f"Для релиза <b>{escape(release_name)}</b> незакрытых issues не найдено."
    matched.sort(key=lambda item: item.event_count, reverse=True)
    lines = [f"<b>🚀 Релиз {escape(release_name)}</b>", ""]
    for item in matched[:10]:
        marker = " regression" if item.is_regression else ""
        lines.append(f"• {_issue_label(item)} — {item.event_count} событий{marker}")
    return "\n".join(lines)
async def run_manual_sync() -> SyncSummary:
    """Run a full issue sync immediately and return its summary."""
    return await sync_issues()
async def build_sync_status() -> str:
    """Human-readable timestamp of the last successful API sync."""
    state = await get_last_sync_state("api_sync")
    if state is None or state.last_successful_at is None:
        return "Синхронизация ещё не выполнялась."
    timestamp = escape(state.last_successful_at.isoformat())
    return (
        "<b>Последняя синхронизация</b>\n\n"
        "• источник: api_sync\n"
        f"• время: {timestamp}"
    )

View File

@@ -0,0 +1,74 @@
import re
from sqlalchemy import select
from glitchup_bot.api.schemas import WebhookAttachment
from glitchup_bot.models.database import get_session_factory
from glitchup_bot.models.mute_rules import MuteRule
def _rule_target_text(attachment: WebhookAttachment, project_slug: str | None) -> str:
    """Newline-joined haystack (project slug, title, body) that mute patterns match."""
    return "\n".join([project_slug or "", attachment.title, attachment.text or ""])
def validate_pattern(pattern: str) -> str:
    """Strip and compile-check a mute-rule regex; return the stripped pattern.

    Raises ValueError for blank input and re.error for an invalid expression.
    """
    trimmed = pattern.strip()
    if not trimmed:
        raise ValueError("Pattern must not be empty")
    re.compile(trimmed)  # raises re.error on an invalid expression
    return trimmed
async def find_matching_rule(
    attachment: WebhookAttachment,
    project_slug: str | None,
) -> MuteRule | None:
    """Return the first active rule (lowest id) whose regex matches the alert text.

    Matching is a case-insensitive re.search over "project\\ntitle\\nbody".
    Returns None when no active rule matches.
    """
    target = _rule_target_text(attachment, project_slug)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(MuteRule).where(MuteRule.is_active.is_(True)).order_by(MuteRule.id)
        )
        rules = list(result.scalars().all())
    for rule in rules:
        if re.search(rule.pattern, target, flags=re.IGNORECASE):
            return rule
    return None
async def add_rule(pattern: str, description: str | None = None) -> MuteRule:
    """Create a mute rule, or update and reactivate an existing one with
    the same pattern. The pattern is validated before touching the DB.
    """
    normalized = validate_pattern(pattern)
    async with get_session_factory()() as session:
        existing = (
            await session.execute(select(MuteRule).where(MuteRule.pattern == normalized))
        ).scalar_one_or_none()
        if existing is None:
            existing = MuteRule(pattern=normalized, description=description)
            session.add(existing)
        else:
            existing.description = description
            existing.is_active = True
        await session.commit()
        await session.refresh(existing)
        return existing
async def remove_rule(rule_id: int) -> bool:
    """Delete a rule by primary key; returns False when it does not exist."""
    async with get_session_factory()() as session:
        rule = await session.get(MuteRule, rule_id)
        if rule is None:
            return False
        await session.delete(rule)
        await session.commit()
    return True
async def list_rules() -> list[MuteRule]:
    """All mute rules (active and inactive), ordered by id."""
    async with get_session_factory()() as session:
        rows = await session.execute(select(MuteRule).order_by(MuteRule.id))
        return list(rows.scalars().all())

View File

@@ -0,0 +1,195 @@
from sqlalchemy import select
from glitchup_bot.config import settings
from glitchup_bot.models.database import get_session_factory
from glitchup_bot.models.ownership import (
GroupSubscriberOverride,
GroupTopicOverride,
ProjectOwnershipOverride,
)
# Groups that may own projects / receive topic routing.
PROJECT_GROUPS = {"backend", "frontend"}
TOPIC_GROUPS = {"backend", "frontend", "digest"}


def validate_project_group(group_name: str) -> str:
    """Lower-case and validate a project group name ("backend"/"frontend")."""
    candidate = group_name.lower()
    if candidate in PROJECT_GROUPS:
        return candidate
    raise ValueError("Group must be backend or frontend")


def validate_topic_group(group_name: str) -> str:
    """Lower-case and validate a topic group name (project groups plus "digest")."""
    candidate = group_name.lower()
    if candidate in TOPIC_GROUPS:
        return candidate
    raise ValueError("Group must be backend, frontend, or digest")
async def resolve_group(project_slug: str | None) -> str:
    """Owning group for a project: DB override first, then static settings.

    Missing/empty slugs default to "backend".
    """
    if not project_slug:
        return "backend"
    async with get_session_factory()() as session:
        result = await session.execute(
            select(ProjectOwnershipOverride.group_name).where(
                ProjectOwnershipOverride.project_slug == project_slug
            )
        )
        group_name = result.scalar_one_or_none()
    return group_name or settings.get_group(project_slug)
async def resolve_topic_id(group_name: str) -> int:
    """Telegram topic id for a group: DB override first, then settings fallback.

    Raises ValueError for group names outside TOPIC_GROUPS.
    """
    normalized = validate_topic_group(group_name)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupTopicOverride.topic_id).where(GroupTopicOverride.group_name == normalized)
        )
        topic_id = result.scalar_one_or_none()
    return topic_id if topic_id is not None else settings.get_topic_id(normalized)
async def resolve_subscribers(group_name: str) -> list[int]:
    """Union of configured subscribers and DB overrides, sorted ascending."""
    normalized = validate_project_group(group_name)
    combined: set[int] = set(settings.get_subscribers(normalized))
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupSubscriberOverride.user_id).where(
                GroupSubscriberOverride.group_name == normalized
            )
        )
        combined.update(result.scalars().all())
    return sorted(combined)
async def set_project_group(project_slug: str, group_name: str) -> None:
    """Upsert the ownership override mapping a project slug to a group."""
    normalized = validate_project_group(group_name)
    async with get_session_factory()() as session:
        existing = (
            await session.execute(
                select(ProjectOwnershipOverride).where(
                    ProjectOwnershipOverride.project_slug == project_slug
                )
            )
        ).scalar_one_or_none()
        if existing is not None:
            existing.group_name = normalized
        else:
            session.add(
                ProjectOwnershipOverride(project_slug=project_slug, group_name=normalized)
            )
        await session.commit()
async def clear_project_group(project_slug: str) -> bool:
    """Remove a project's ownership override; returns False when none existed."""
    async with get_session_factory()() as session:
        record = (
            await session.execute(
                select(ProjectOwnershipOverride).where(
                    ProjectOwnershipOverride.project_slug == project_slug
                )
            )
        ).scalar_one_or_none()
        if record is None:
            return False
        await session.delete(record)
        await session.commit()
    return True
async def set_topic_override(group_name: str, topic_id: int) -> None:
    """Upsert a group's Telegram topic-id override."""
    normalized = validate_topic_group(group_name)
    async with get_session_factory()() as session:
        existing = (
            await session.execute(
                select(GroupTopicOverride).where(GroupTopicOverride.group_name == normalized)
            )
        ).scalar_one_or_none()
        if existing is not None:
            existing.topic_id = topic_id
        else:
            session.add(GroupTopicOverride(group_name=normalized, topic_id=topic_id))
        await session.commit()
async def clear_topic_override(group_name: str) -> bool:
    """Delete the topic-id override for a group; return True if one existed."""
    normalized = validate_topic_group(group_name)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupTopicOverride).where(GroupTopicOverride.group_name == normalized)
        )
        record = result.scalar_one_or_none()
        if record is None:
            return False
        await session.delete(record)
        await session.commit()
        return True
async def add_subscriber(group_name: str, user_id: int) -> None:
    """Add a DM subscriber override for a group; idempotent if already present."""
    normalized = validate_project_group(group_name)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupSubscriberOverride).where(
                GroupSubscriberOverride.group_name == normalized,
                GroupSubscriberOverride.user_id == user_id,
            )
        )
        record = result.scalar_one_or_none()
        if record is None:
            session.add(GroupSubscriberOverride(group_name=normalized, user_id=user_id))
            # Commit only when a row was actually added.
            await session.commit()
async def remove_subscriber(group_name: str, user_id: int) -> bool:
    """Remove a DM subscriber override; return True if a row was deleted.

    Note: this only removes DB overrides, not subscribers configured via settings.
    """
    normalized = validate_project_group(group_name)
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupSubscriberOverride).where(
                GroupSubscriberOverride.group_name == normalized,
                GroupSubscriberOverride.user_id == user_id,
            )
        )
        record = result.scalar_one_or_none()
        if record is None:
            return False
        await session.delete(record)
        await session.commit()
        return True
async def list_project_overrides() -> list[ProjectOwnershipOverride]:
    """Return all project->group ownership overrides, ordered by project slug."""
    async with get_session_factory()() as session:
        result = await session.execute(
            select(ProjectOwnershipOverride).order_by(ProjectOwnershipOverride.project_slug)
        )
        return list(result.scalars().all())
async def list_topic_overrides() -> list[GroupTopicOverride]:
    """Return all group->topic-id overrides, ordered by group name."""
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupTopicOverride).order_by(GroupTopicOverride.group_name)
        )
        return list(result.scalars().all())
async def list_subscriber_overrides() -> list[GroupSubscriberOverride]:
    """Return all subscriber overrides, ordered by group name then user id."""
    async with get_session_factory()() as session:
        result = await session.execute(
            select(GroupSubscriberOverride).order_by(
                GroupSubscriberOverride.group_name, GroupSubscriberOverride.user_id
            )
        )
        return list(result.scalars().all())

View File

@@ -0,0 +1,233 @@
import logging
from collections import defaultdict
from dataclasses import dataclass
from datetime import UTC, datetime
from typing import Any
from sqlalchemy import select
from glitchup_bot.config import settings
from glitchup_bot.glitchtip_client.client import get_glitchtip_client
from glitchup_bot.models.database import get_session_factory
from glitchup_bot.models.issues import IssueCache
from glitchup_bot.models.sync import SyncState
logger = logging.getLogger(__name__)
@dataclass(slots=True)
class IssueSnapshot:
    """Normalized view of one GlitchTip issue, whether it came from the API or the cache."""

    issue_id: int  # GlitchTip issue id
    project_slug: str
    title: str
    culprit: str | None  # e.g. module/function where the error originated
    level: str  # lowercased severity, defaults to "error" upstream
    status: str  # lowercased, defaults to "unresolved" upstream
    first_seen: datetime | None
    last_seen: datetime | None
    event_count: int
    is_regression: bool
    link: str | None  # permalink into the GlitchTip UI
    release: str | None  # release version string, if one could be extracted
@dataclass(slots=True)
class SyncSummary:
    """Aggregate result of one sync_issues() run."""

    project_count: int  # number of project slugs that were synced
    issue_count: int  # number of issue snapshots fetched from the API
    resolved_count: int  # cached issues newly marked resolved because they vanished upstream
    synced_at: datetime
def _configured_project_slugs() -> list[str]:
    """All project slugs the bot watches: backend projects followed by frontend ones."""
    return settings.backend_projects + settings.frontend_projects
def _parse_dt(value: str | None) -> datetime | None:
if not value:
return None
return datetime.fromisoformat(value.replace("Z", "+00:00"))
def _extract_release(issue: dict[str, Any]) -> str | None:
direct = issue.get("lastRelease") or issue.get("release") or issue.get("releaseName")
if isinstance(direct, str) and direct.strip():
return direct.strip()
if isinstance(direct, dict):
for key in ("version", "shortVersion", "name"):
value = direct.get(key)
if isinstance(value, str) and value.strip():
return value.strip()
tags = issue.get("tags")
if isinstance(tags, list):
for tag in tags:
if not isinstance(tag, dict):
continue
if tag.get("key") != "release":
continue
value = tag.get("value")
if isinstance(value, str) and value.strip():
return value.strip()
return None
def _normalize_issue(project_slug: str, issue: dict[str, Any]) -> IssueSnapshot:
    """Convert a raw GlitchTip issue dict into an IssueSnapshot with safe defaults."""
    # int() coercion: the API appears to return ids/counts as strings — TODO confirm.
    issue_id = int(issue["id"])
    return IssueSnapshot(
        issue_id=issue_id,
        project_slug=project_slug,
        title=issue.get("title") or "unknown",
        culprit=issue.get("culprit"),
        level=(issue.get("level") or "error").lower(),
        status=(issue.get("status") or "unresolved").lower(),
        first_seen=_parse_dt(issue.get("firstSeen")),
        last_seen=_parse_dt(issue.get("lastSeen")),
        event_count=int(issue.get("count") or 0),
        is_regression=bool(issue.get("isRegression")),
        link=issue.get("permalink") or issue.get("link"),
        release=_extract_release(issue),
    )
async def mark_sync_success(source: str) -> None:
    """Upsert the SyncState row for ``source`` with the current UTC time."""
    now = datetime.now(UTC)
    async with get_session_factory()() as session:
        result = await session.execute(select(SyncState).where(SyncState.source == source))
        record = result.scalar_one_or_none()
        if record is None:
            record = SyncState(source=source, last_successful_at=now)
            session.add(record)
        else:
            record.last_successful_at = now
        await session.commit()
async def sync_issues(project_slugs: list[str] | None = None) -> SyncSummary:
    """Fetch issues from the GlitchTip API and reconcile them into the IssueCache table.

    For each configured (or explicitly given) project slug:
    - fetch and normalize all issues,
    - insert/refresh matching IssueCache rows,
    - mark cached issues that no longer appear upstream as "resolved",
    - record a successful "api_sync" in SyncState.

    Returns a SyncSummary describing the run. Network or DB errors propagate.
    """
    slugs = project_slugs or _configured_project_slugs()
    client = get_glitchtip_client()
    snapshots: list[IssueSnapshot] = []
    for slug in slugs:
        issues = await client.list_issues(slug)
        # Skip malformed entries without an id rather than failing the whole sync.
        snapshots.extend(
            _normalize_issue(slug, issue) for issue in issues if issue.get("id") is not None
        )
    # Index live issue ids per slug so we can detect issues that vanished upstream.
    issue_ids_by_slug: dict[str, set[int]] = defaultdict(set)
    for snapshot in snapshots:
        issue_ids_by_slug[snapshot.project_slug].add(snapshot.issue_id)
    now = datetime.now(UTC)
    resolved_count = 0
    async with get_session_factory()() as session:
        existing_rows = (
            await session.execute(select(IssueCache).where(IssueCache.project_slug.in_(slugs)))
        ).scalars()
        existing_by_id = {row.glitchtip_issue_id: row for row in existing_rows}
        for snapshot in snapshots:
            row = existing_by_id.get(snapshot.issue_id)
            if row is None:
                # First time we see this issue: create a fresh cache row.
                row = IssueCache(
                    glitchtip_issue_id=snapshot.issue_id,
                    project_slug=snapshot.project_slug,
                    title=snapshot.title,
                    culprit=snapshot.culprit,
                    level=snapshot.level,
                    status=snapshot.status,
                    first_seen=snapshot.first_seen,
                    last_seen=snapshot.last_seen,
                    event_count=snapshot.event_count,
                    is_regression=snapshot.is_regression,
                    link=snapshot.link,
                    release=snapshot.release,
                )
                session.add(row)
                continue
            # Known issue: overwrite every mutable field with the fresh snapshot.
            row.project_slug = snapshot.project_slug
            row.title = snapshot.title
            row.culprit = snapshot.culprit
            row.level = snapshot.level
            row.status = snapshot.status
            row.first_seen = snapshot.first_seen
            row.last_seen = snapshot.last_seen
            row.event_count = snapshot.event_count
            row.is_regression = snapshot.is_regression
            row.link = snapshot.link
            row.release = snapshot.release
            row.updated_at = now
        # Cached issues absent from the fresh listing are treated as resolved upstream.
        for row in existing_by_id.values():
            if row.glitchtip_issue_id in issue_ids_by_slug[row.project_slug]:
                continue
            if row.status != "resolved":
                row.status = "resolved"
                row.updated_at = now
                resolved_count += 1
        # NOTE(review): this duplicates mark_sync_success("api_sync"); kept inline so
        # the state update commits atomically with the cache changes.
        result = await session.execute(select(SyncState).where(SyncState.source == "api_sync"))
        state = result.scalar_one_or_none()
        if state is None:
            state = SyncState(source="api_sync", last_successful_at=now)
            session.add(state)
        else:
            state.last_successful_at = now
        await session.commit()
    return SyncSummary(
        project_count=len(slugs),
        issue_count=len(snapshots),
        resolved_count=resolved_count,
        synced_at=now,
    )
async def load_issue_snapshots(
    project_slugs: list[str] | None = None,
    *,
    refresh: bool = True,
    unresolved_only: bool = True,
) -> list[IssueSnapshot]:
    """Load issue snapshots from the cache, optionally refreshing from the API first.

    When ``refresh`` is true and the API sync fails, the error is logged and the
    (possibly stale) cached data is returned instead of raising.
    """
    slugs = project_slugs or _configured_project_slugs()
    if refresh:
        try:
            await sync_issues(slugs)
        except Exception:
            logger.exception("Issue sync failed, falling back to cached data")
    async with get_session_factory()() as session:
        stmt = select(IssueCache).where(IssueCache.project_slug.in_(slugs))
        if unresolved_only:
            stmt = stmt.where(IssueCache.status == "unresolved")
        rows = (await session.execute(stmt)).scalars().all()
        # Convert ORM rows into plain snapshots so callers never touch live session objects.
        return [
            IssueSnapshot(
                issue_id=row.glitchtip_issue_id,
                project_slug=row.project_slug,
                title=row.title,
                culprit=row.culprit,
                level=row.level,
                status=row.status,
                first_seen=row.first_seen,
                last_seen=row.last_seen,
                event_count=row.event_count,
                is_regression=row.is_regression,
                link=row.link,
                release=row.release,
            )
            for row in rows
        ]
async def get_last_sync_state(source: str) -> SyncState | None:
    """Return the SyncState row for ``source``, or None if it never synced."""
    async with get_session_factory()() as session:
        result = await session.execute(select(SyncState).where(SyncState.source == source))
        return result.scalar_one_or_none()

View File

@@ -0,0 +1,113 @@
import html
import logging
from glitchup_bot.api.schemas import WebhookAttachment
from glitchup_bot.bot.bot import get_bot
from glitchup_bot.config import settings
from glitchup_bot.services.routing import resolve_group, resolve_subscribers, resolve_topic_id
logger = logging.getLogger(__name__)
def _escape(value: str | None) -> str:
return html.escape(value or "unknown")
def _format_link(label: str, url: str | None) -> str | None:
if not url:
return None
return f'<a href="{html.escape(url, quote=True)}">{html.escape(label)}</a>'
def _format_alert_message(
    attachment: WebhookAttachment,
    project_slug: str | None,
    priority: str,
    is_uptime: bool = False,
    release_name: str | None = None,
) -> str:
    """Render an alert as a Telegram HTML message (Russian labels, escaped values).

    Uptime alerts get a monitor/status layout; issue alerts get a project/problem
    layout with an icon chosen by priority ("P1" -> fire, anything else -> yellow).
    """
    if is_uptime:
        lines = [
            "⚠️ <b>GlitchTip Uptime Alert</b>",
            "",
            f"<b>Монитор:</b> {_escape(attachment.title)}",
            f"<b>Статус:</b> {_escape(attachment.text)}",
        ]
        open_link = _format_link("Открыть в GlitchTip", attachment.title_link)
        if open_link:
            lines.append(open_link)
        return "\n".join(lines)
    icon = "🔥" if priority == "P1" else "🟡"
    lines = [
        f"{icon} <b>GlitchTip alert / {html.escape(priority)}</b>",
        "",
        f"<b>Проект:</b> {_escape(project_slug)}",
        f"<b>Проблема:</b> {_escape(attachment.title)}",
    ]
    # Optional lines: only include location/release when the data is present.
    if attachment.text:
        lines.append(f"<b>Где:</b> {_escape(attachment.text)}")
    if release_name:
        lines.append(f"<b>Релиз:</b> {_escape(release_name)}")
    open_link = _format_link("Открыть в GlitchTip", attachment.title_link)
    if open_link:
        lines.append(open_link)
    return "\n".join(lines)
async def send_alert(
    attachment: WebhookAttachment,
    project_slug: str | None,
    priority: str,
    is_uptime: bool = False,
    release_name: str | None = None,
) -> str:
    """Send an alert to the group topic and DM subscribers; return the resolved group name.

    Send failures are logged and swallowed so one bad destination never blocks the rest.
    NOTE(review): the text uses HTML tags but no parse_mode is passed here — presumably
    get_bot() configures HTML as the default parse mode; confirm.
    """
    text = _format_alert_message(
        attachment,
        project_slug,
        priority,
        is_uptime=is_uptime,
        release_name=release_name,
    )
    group_name = await resolve_group(project_slug)
    topic_id = await resolve_topic_id(group_name)
    bot = get_bot()
    try:
        await bot.send_message(
            chat_id=settings.telegram_group_chat_id,
            message_thread_id=topic_id,
            text=text,
            disable_web_page_preview=True,
        )
    except Exception:
        logger.exception("Failed to send alert to topic %s", topic_id)
    # Fan the same message out to individual subscribers via direct messages.
    for user_id in await resolve_subscribers(group_name):
        try:
            await bot.send_message(
                chat_id=user_id,
                text=text,
                disable_web_page_preview=True,
            )
        except Exception:
            logger.exception("Failed to send DM to user %s", user_id)
    return group_name
async def send_digest_message(text: str) -> None:
    """Post a digest message to the dedicated "digest" topic; failures are logged, not raised."""
    bot = get_bot()
    topic_id = await resolve_topic_id("digest")
    try:
        await bot.send_message(
            chat_id=settings.telegram_group_chat_id,
            message_thread_id=topic_id,
            text=text,
            disable_web_page_preview=True,
        )
    except Exception:
        logger.exception("Failed to send digest")

View File

View File

@@ -0,0 +1,81 @@
import logging
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from glitchup_bot.config import settings
from glitchup_bot.services.digest_builder import build_digest
from glitchup_bot.services.sync_service import sync_issues
from glitchup_bot.services.telegram_sender import send_digest_message
logger = logging.getLogger(__name__)
scheduler: AsyncIOScheduler | None = None
async def weekly_digest_job() -> None:
    """Scheduled job: build a fresh digest and post it; errors are logged, never raised."""
    logger.info("Running weekly digest job")
    try:
        await send_digest_message(await build_digest(refresh=True))
        logger.info("Weekly digest sent successfully")
    except Exception:
        logger.exception("Failed to send weekly digest")
async def sync_job() -> None:
    """Scheduled job: run a full issue sync and log its summary; errors are logged, never raised."""
    logger.info("Running scheduled issue sync")
    try:
        summary = await sync_issues()
        logger.info(
            "Issue sync finished: %s projects, %s issues, %s resolved",
            summary.project_count,
            summary.issue_count,
            summary.resolved_count,
        )
    except Exception:
        logger.exception("Scheduled issue sync failed")
def setup_scheduler() -> AsyncIOScheduler:
    """Create and start the module-level scheduler (idempotent while it is running).

    Registers two jobs: a periodic issue sync and a weekly cron-based digest,
    both configured from settings. Returns the running scheduler instance.
    """
    global scheduler
    # Reuse the existing scheduler if it is already up; recreate if it was shut down.
    if scheduler is not None and scheduler.running:
        return scheduler
    scheduler = AsyncIOScheduler()
    scheduler.add_job(
        sync_job,
        IntervalTrigger(minutes=settings.sync_interval_minutes),
        id="issue_sync",
        replace_existing=True,
    )
    scheduler.add_job(
        weekly_digest_job,
        CronTrigger(
            day_of_week=settings.digest_cron_day,
            hour=settings.digest_cron_hour,
            minute=settings.digest_cron_minute,
            timezone=settings.digest_timezone,
        ),
        id="weekly_digest",
        replace_existing=True,
    )
    scheduler.start()
    logger.info(
        "Scheduler started: sync every %s min, digest at %s %02d:%02d %s",
        settings.sync_interval_minutes,
        settings.digest_cron_day,
        settings.digest_cron_hour,
        settings.digest_cron_minute,
        settings.digest_timezone,
    )
    return scheduler
async def shutdown_scheduler() -> None:
    """Stop the module-level scheduler (if any) and clear the global reference.

    Safe to call multiple times. ``wait=False`` means running jobs are not awaited.
    """
    global scheduler
    if scheduler is not None:
        # APScheduler's shutdown() raises SchedulerNotRunningError if the scheduler
        # is not running (e.g. it was already stopped); guard so app shutdown
        # never crashes on cleanup.
        if scheduler.running:
            scheduler.shutdown(wait=False)
        scheduler = None

46
tests/conftest.py Normal file
View File

@@ -0,0 +1,46 @@
import os
from collections.abc import Iterator

import pytest

from glitchup_bot.config import clear_settings_cache
# Baseline environment for tests: every settings field gets a deterministic value.
DEFAULT_ENV = {
    "TELEGRAM_BOT_TOKEN": "token",
    "TELEGRAM_GROUP_CHAT_ID": "-1001234567890",
    "TELEGRAM_BACKEND_TOPIC_ID": "11",
    "TELEGRAM_FRONTEND_TOPIC_ID": "22",
    "TELEGRAM_DIGEST_TOPIC_ID": "33",
    "BACKEND_PROJECTS": "backend-production,backend-staging",
    "FRONTEND_PROJECTS": "frontend-production,frontend-staging",
    "BACKEND_SUBSCRIBERS": "",
    "FRONTEND_SUBSCRIBERS": "",
    "TELEGRAM_ADMIN_IDS": "",
    "GLITCHTIP_URL": "https://glitchtip.example.com",
    "GLITCHTIP_API_TOKEN": "secret",
    "GLITCHTIP_ORG_SLUG": "org",
    "DATABASE_URL": "postgresql+asyncpg://glitchup:glitchup@db:5432/glitchup",
    "API_PORT": "8080",
    "WEBHOOK_SECRET": "",
    "DIGEST_CRON_DAY": "mon",
    "DIGEST_CRON_HOUR": "10",
    "DIGEST_CRON_MINUTE": "0",
    "DIGEST_TIMEZONE": "Asia/Krasnoyarsk",
    "SYNC_INTERVAL_MINUTES": "30",
    "ALERT_ENVIRONMENTS": "production",
    "DEDUP_WINDOW_HOURS": "6",
    "ALERT_RATE_LIMIT_COUNT": "10",
    "ALERT_RATE_LIMIT_WINDOW_MINUTES": "15",
}
# Seed at import time so settings constructed during collection also see defaults;
# setdefault keeps any values the developer exported explicitly.
for key, value in DEFAULT_ENV.items():
    os.environ.setdefault(key, value)
@pytest.fixture(autouse=True)
def reset_settings(monkeypatch: pytest.MonkeyPatch) -> Iterator[None]:
    """Pin the default env vars and reset the settings cache around every test.

    Annotated as Iterator[None] (not None): this is a generator fixture, and mypy
    rejects a bare ``-> None`` return type on a function containing ``yield``.
    """
    for key, value in DEFAULT_ENV.items():
        monkeypatch.setenv(key, value)
    clear_settings_cache()
    yield
    # Clear again so the test's env mutations cannot leak a cached Settings object.
    clear_settings_cache()

View File

@@ -0,0 +1,165 @@
import pytest
from glitchup_bot.services import alert_processor
def make_issue_payload(project: str = "backend-production", color: str = "#e52b50") -> dict:
    """Build a minimal GlitchTip issue-webhook payload for the given project/color."""
    attachment = {
        "title": "ValueError: boom",
        "title_link": "https://glitchtip.example.com/issues/1",
        "text": "app.views.index",
        "color": color,
        "fields": [
            {"title": "Project", "value": project, "short": True},
            {"title": "Environment", "value": "production", "short": True},
        ],
    }
    return {"text": "GlitchTip Alert", "attachments": [attachment]}
async def _noop(*args, **kwargs):
    """Async stand-in that accepts any arguments and returns None."""
    return None
@pytest.mark.asyncio
async def test_process_webhook_sends_and_records_notification(monkeypatch):
    """Happy path: a non-duplicate, unmuted, unthrottled alert is sent and recorded as 'sent'."""
    sent_calls = []
    recorded = []
    async def fake_send_alert(attachment, project_slug, priority, **kwargs):
        sent_calls.append((attachment.title, project_slug, priority))
        return "backend"
    async def fake_is_duplicate(fingerprint: str) -> bool:
        # Also asserts the fingerprint format: "<project>:<title>".
        assert fingerprint == "backend-production:ValueError: boom"
        return False
    async def fake_record(issue_id: int, fingerprint: str, **kwargs) -> None:
        recorded.append((issue_id, fingerprint, kwargs["delivery_status"]))
    monkeypatch.setattr(alert_processor, "send_alert", fake_send_alert)
    monkeypatch.setattr(alert_processor, "_is_duplicate", fake_is_duplicate)
    monkeypatch.setattr(alert_processor, "_record_notification", fake_record)
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    monkeypatch.setattr(
        alert_processor, "resolve_group", lambda project_slug: _async_value("backend")
    )
    monkeypatch.setattr(alert_processor, "find_matching_rule", lambda *args: _async_value(None))
    monkeypatch.setattr(alert_processor, "_is_rate_limited", lambda *args: _async_value(False))
    await alert_processor.process_webhook_payload(make_issue_payload())
    assert sent_calls == [("ValueError: boom", "backend-production", "P1")]
    assert recorded == [(0, "backend-production:ValueError: boom", "sent")]
@pytest.mark.asyncio
async def test_process_webhook_skips_duplicate(monkeypatch):
    """A payload whose fingerprint is already known must not trigger a send."""
    async def fake_send_alert(*args, **kwargs):
        # Reaching this fake means deduplication failed.
        raise AssertionError("duplicate alert should not be sent")
    monkeypatch.setattr(alert_processor, "send_alert", fake_send_alert)
    monkeypatch.setattr(alert_processor, "_is_duplicate", lambda fingerprint: _async_value(True))
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    await alert_processor.process_webhook_payload(make_issue_payload())
@pytest.mark.asyncio
async def test_process_webhook_skips_non_alert_environment(monkeypatch):
    """Projects outside ALERT_ENVIRONMENTS (e.g. staging) must be ignored entirely."""
    called = False
    async def fake_send_alert(*args, **kwargs):
        nonlocal called
        called = True
    monkeypatch.setattr(alert_processor, "send_alert", fake_send_alert)
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    await alert_processor.process_webhook_payload(make_issue_payload(project="backend-staging"))
    assert called is False
@pytest.mark.asyncio
async def test_process_webhook_handles_uptime(monkeypatch):
    """Uptime payloads are sent with is_uptime=True, no project, and P1 priority."""
    calls = []
    async def fake_send_alert(attachment, project_slug, priority, is_uptime=False, **kwargs):
        calls.append((attachment.title, project_slug, priority, is_uptime))
        return "backend"
    monkeypatch.setattr(alert_processor, "send_alert", fake_send_alert)
    monkeypatch.setattr(alert_processor, "_record_notification", _noop)
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    await alert_processor.process_webhook_payload(
        {
            "text": "GlitchTip Uptime Alert",
            "attachments": [
                {
                    "title": "Example Monitor",
                    "text": "The monitored site has gone down.",
                    "title_link": "https://glitchtip.example.com/uptime/1",
                }
            ],
        }
    )
    assert calls == [("Example Monitor", None, "P1", True)]
@pytest.mark.asyncio
async def test_process_webhook_skips_muted(monkeypatch):
    """A matching mute rule suppresses the send but still records status 'muted'."""
    recorded = []
    class FakeRule:
        # Minimal stand-in for a mute rule; only the pattern attribute is read.
        pattern = "ValueError"
    async def fake_record(issue_id: int, fingerprint: str, **kwargs) -> None:
        recorded.append(kwargs["delivery_status"])
    monkeypatch.setattr(alert_processor, "_is_duplicate", lambda fingerprint: _async_value(False))
    monkeypatch.setattr(
        alert_processor, "find_matching_rule", lambda *args: _async_value(FakeRule())
    )
    monkeypatch.setattr(alert_processor, "_record_notification", fake_record)
    monkeypatch.setattr(
        alert_processor, "resolve_group", lambda project_slug: _async_value("backend")
    )
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    await alert_processor.process_webhook_payload(make_issue_payload())
    assert recorded == ["muted"]
@pytest.mark.asyncio
async def test_process_webhook_skips_rate_limited(monkeypatch):
    """When the rate limiter trips, the send is skipped and status 'rate_limited' recorded."""
    recorded = []
    async def fake_record(issue_id: int, fingerprint: str, **kwargs) -> None:
        recorded.append(kwargs["delivery_status"])
    monkeypatch.setattr(alert_processor, "_is_duplicate", lambda fingerprint: _async_value(False))
    monkeypatch.setattr(alert_processor, "find_matching_rule", lambda *args: _async_value(None))
    monkeypatch.setattr(alert_processor, "_record_notification", fake_record)
    monkeypatch.setattr(
        alert_processor, "resolve_group", lambda project_slug: _async_value("backend")
    )
    monkeypatch.setattr(alert_processor, "_is_rate_limited", lambda *args: _async_value(True))
    monkeypatch.setattr(alert_processor, "mark_sync_success", _noop)
    # Yellow color (#e9b949) presumably maps to a lower priority that is rate-limitable.
    await alert_processor.process_webhook_payload(
        make_issue_payload(project="backend-production", color="#e9b949")
    )
    assert recorded == ["rate_limited"]
async def _async_value(value):
    """Wrap a value in a coroutine so plain lambdas can stand in for async functions."""
    return value

42
tests/test_api_webhook.py Normal file
View File

@@ -0,0 +1,42 @@
import pytest
from httpx import ASGITransport, AsyncClient
from glitchup_bot.api.app import app
from glitchup_bot.config import clear_settings_cache
@pytest.mark.asyncio
async def test_webhook_rejects_invalid_secret(monkeypatch):
    """A request without the X-Webhook-Secret header must be rejected with 403."""
    monkeypatch.setenv("WEBHOOK_SECRET", "expected-secret")
    clear_settings_cache()
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as client:
        response = await client.post(
            "/webhooks/glitchtip", json={"text": "GlitchTip Alert", "attachments": []}
        )
    assert response.status_code == 403
@pytest.mark.asyncio
async def test_webhook_accepts_valid_secret(monkeypatch):
    """A request with the correct secret returns 200 and forwards the raw payload."""
    received = []
    async def fake_process(payload: dict) -> None:
        received.append(payload)
    monkeypatch.setenv("WEBHOOK_SECRET", "expected-secret")
    clear_settings_cache()
    # Patch at the webhook module so the route's reference is replaced.
    monkeypatch.setattr("glitchup_bot.api.webhook.process_webhook_payload", fake_process)
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as client:
        response = await client.post(
            "/webhooks/glitchtip",
            headers={"X-Webhook-Secret": "expected-secret"},
            json={"text": "GlitchTip Alert", "attachments": []},
        )
    assert response.status_code == 200
    assert received == [{"text": "GlitchTip Alert", "attachments": []}]

30
tests/test_config.py Normal file
View File

@@ -0,0 +1,30 @@
from glitchup_bot.config import get_settings
def test_settings_parse_lists_and_groups(monkeypatch):
    """Comma-separated env vars parse into typed lists; group/env lookups behave as expected."""
    monkeypatch.setenv("BACKEND_PROJECTS", "api-production,worker-production")
    monkeypatch.setenv("FRONTEND_PROJECTS", "web-production")
    monkeypatch.setenv("BACKEND_SUBSCRIBERS", "1,2")
    monkeypatch.setenv("FRONTEND_SUBSCRIBERS", "7,8")
    monkeypatch.setenv("TELEGRAM_ADMIN_IDS", "5,6")
    monkeypatch.setenv("ALERT_ENVIRONMENTS", "production,hotfix")
    monkeypatch.setenv("SYNC_INTERVAL_MINUTES", "45")
    monkeypatch.setenv("ALERT_RATE_LIMIT_COUNT", "7")
    monkeypatch.setenv("ALERT_RATE_LIMIT_WINDOW_MINUTES", "20")
    settings = get_settings()
    assert settings.backend_projects == ["api-production", "worker-production"]
    assert settings.frontend_projects == ["web-production"]
    assert settings.backend_subscribers == [1, 2]
    assert settings.frontend_subscribers == [7, 8]
    assert settings.telegram_admin_ids == [5, 6]
    assert settings.alert_environments == ["production", "hotfix"]
    assert settings.sync_interval_minutes == 45
    assert settings.alert_rate_limit_count == 7
    assert settings.alert_rate_limit_window_minutes == 20
    assert settings.get_environment("api-production") == "production"
    assert settings.get_group("web-production") == "frontend"
    # Unknown projects fall back to the backend group.
    assert settings.get_group("unknown-project") == "backend"
    assert settings.is_alert_environment("api-production") is True
    assert settings.is_admin(5) is True

View File

@@ -0,0 +1,227 @@
from datetime import UTC, datetime, timedelta
import pytest
from glitchup_bot.services import digest_builder
from glitchup_bot.services.sync_service import IssueSnapshot
@pytest.mark.asyncio
async def test_build_digest_aggregates_projects(monkeypatch):
    """Digest counts new issues, regressions, week-old unresolved, and shows releases."""
    now = datetime.now(UTC)
    issues = [
        IssueSnapshot(
            issue_id=1,
            project_slug="backend-production",
            title="New backend issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(days=1),
            last_seen=now,
            event_count=12,
            is_regression=True,
            link=None,
            release="2026.03.20",
        ),
        IssueSnapshot(
            issue_id=2,
            project_slug="frontend-production",
            title="Old frontend issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(days=10),
            last_seen=now,
            event_count=3,
            is_regression=False,
            link=None,
            release=None,
        ),
    ]
    monkeypatch.setattr(
        digest_builder, "_load_issues", lambda *args, **kwargs: _async_value(issues)
    )
    text = await digest_builder.build_digest()
    assert "новых issues: 1" in text
    assert "regressions: 1" in text
    assert "unresolved > 7 дней: 1" in text
    assert "backend-production" in text
    assert "Old frontend issue" in text
    assert "2026.03.20" in text
@pytest.mark.asyncio
async def test_build_today_summary_limits_to_today(monkeypatch):
    """Today's summary only lists issues first seen within the current day."""
    now = datetime.now(UTC)
    issues = [
        IssueSnapshot(
            issue_id=1,
            project_slug="backend-production",
            title="Today issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(hours=2),
            last_seen=now,
            event_count=2,
            is_regression=False,
            link=None,
            release=None,
        ),
        IssueSnapshot(
            issue_id=2,
            project_slug="backend-production",
            title="Old issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(days=2),
            last_seen=now,
            event_count=1,
            is_regression=False,
            link=None,
            release=None,
        ),
    ]
    monkeypatch.setattr(
        digest_builder, "_load_issues", lambda *args, **kwargs: _async_value(issues)
    )
    text = await digest_builder.build_today_summary()
    assert "Сегодня: 1 новых issues" in text
    assert "Today issue" in text
    assert "Old issue" not in text
@pytest.mark.asyncio
async def test_build_project_summary(monkeypatch):
    """Per-project summary names the project, its issues, and their event counts."""
    now = datetime.now(UTC)
    issues = [
        IssueSnapshot(
            issue_id=1,
            project_slug="backend-production",
            title="Project issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now,
            last_seen=now,
            event_count=5,
            is_regression=False,
            link="https://glitchtip.example.com/issues/1",
            release=None,
        )
    ]
    monkeypatch.setattr(
        digest_builder, "_load_issues", lambda *args, **kwargs: _async_value(issues)
    )
    text = await digest_builder.build_project_summary("backend-production")
    assert "backend-production" in text
    assert "Project issue" in text
    assert "5 событий" in text
@pytest.mark.asyncio
async def test_build_top_and_stale(monkeypatch):
    """Top list surfaces the loudest issue; stale list shows age in days."""
    now = datetime.now(UTC)
    issues = [
        IssueSnapshot(
            issue_id=1,
            project_slug="backend-production",
            title="Loud issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(days=9),
            last_seen=now,
            event_count=99,
            is_regression=False,
            link=None,
            release=None,
        ),
        IssueSnapshot(
            issue_id=2,
            project_slug="frontend-production",
            title="Quiet issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now - timedelta(days=1),
            last_seen=now,
            event_count=3,
            is_regression=False,
            link=None,
            release=None,
        ),
    ]
    monkeypatch.setattr(
        digest_builder, "_load_issues", lambda *args, **kwargs: _async_value(issues)
    )
    top_text = await digest_builder.build_top_issues()
    stale_text = await digest_builder.build_stale_issues()
    assert "Loud issue" in top_text
    assert "99" in top_text
    assert "Loud issue" in stale_text
    assert "9 дн." in stale_text
@pytest.mark.asyncio
async def test_build_release_summary_and_detail(monkeypatch):
    """Release summary groups issues by release; detail lists each release's issues."""
    now = datetime.now(UTC)
    issues = [
        IssueSnapshot(
            issue_id=1,
            project_slug="backend-production",
            title="Release issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now,
            last_seen=now,
            event_count=15,
            is_regression=True,
            link=None,
            release="2026.03.27",
        ),
        IssueSnapshot(
            issue_id=2,
            project_slug="frontend-production",
            title="Another issue",
            culprit=None,
            level="error",
            status="unresolved",
            first_seen=now,
            last_seen=now,
            event_count=5,
            is_regression=False,
            link=None,
            release="2026.03.27",
        ),
    ]
    monkeypatch.setattr(
        digest_builder, "_load_issues", lambda *args, **kwargs: _async_value(issues)
    )
    summary = await digest_builder.build_release_summary()
    detail = await digest_builder.build_release_detail("2026.03.27")
    assert "2026.03.27" in summary
    assert "2 issues" in summary
    assert "Release issue" in detail
async def _async_value(value):
    """Wrap a value in a coroutine so plain lambdas can stand in for async functions."""
    return value

View File

@@ -0,0 +1,70 @@
import pytest
from glitchup_bot.api.schemas import WebhookAttachment
from glitchup_bot.services import telegram_sender
class FakeBot:
    """Minimal bot stand-in that records every send_message keyword-call."""
    def __init__(self) -> None:
        # Each entry is the kwargs dict of one send_message call, in order.
        self.calls: list[dict] = []
    async def send_message(self, **kwargs) -> None:
        self.calls.append(kwargs)
@pytest.mark.asyncio
async def test_send_alert_routes_to_topic_and_subscribers(monkeypatch):
    """Alert goes to the group topic first, then DMs each subscriber; HTML is escaped."""
    fake_bot = FakeBot()
    monkeypatch.setattr(telegram_sender, "get_bot", lambda: fake_bot)
    monkeypatch.setattr(
        telegram_sender, "resolve_group", lambda project_slug: _async_value("backend")
    )
    monkeypatch.setattr(telegram_sender, "resolve_topic_id", lambda group_name: _async_value(11))
    monkeypatch.setattr(
        telegram_sender,
        "resolve_subscribers",
        lambda group_name: _async_value([101, 202]),
    )
    await telegram_sender.send_alert(
        WebhookAttachment(
            title="<boom>",
            text="service <api>",
            title_link="https://glitchtip.example.com/issues/1?x=<x>",
            color="#e52b50",
        ),
        project_slug="backend-production",
        priority="P1",
        release_name="2026.03.27",
    )
    # One topic message plus two subscriber DMs.
    assert len(fake_bot.calls) == 3
    assert fake_bot.calls[0]["chat_id"] == -1001234567890
    assert fake_bot.calls[0]["message_thread_id"] == 11
    assert "&lt;boom&gt;" in fake_bot.calls[0]["text"]
    assert "service &lt;api&gt;" in fake_bot.calls[0]["text"]
    assert "2026.03.27" in fake_bot.calls[0]["text"]
    assert fake_bot.calls[1]["chat_id"] == 101
    assert fake_bot.calls[2]["chat_id"] == 202
@pytest.mark.asyncio
async def test_send_digest_message_uses_digest_topic(monkeypatch):
    """Digest messages are posted verbatim to the resolved digest topic."""
    fake_bot = FakeBot()
    monkeypatch.setattr(telegram_sender, "get_bot", lambda: fake_bot)
    monkeypatch.setattr(telegram_sender, "resolve_topic_id", lambda group_name: _async_value(33))
    await telegram_sender.send_digest_message("<b>digest</b>")
    assert fake_bot.calls == [
        {
            "chat_id": -1001234567890,
            "message_thread_id": 33,
            "text": "<b>digest</b>",
            "disable_web_page_preview": True,
        }
    ]
async def _async_value(value):
return value

1143
uv.lock generated Normal file

File diff suppressed because it is too large Load Diff