initial commit
Some checks failed
CI / Run tests (push) Has been cancelled
CI / Docker build test (push) Has been cancelled
CI / Lint (ruff + mypy) (push) Has been cancelled

This commit is contained in:
2026-03-30 16:46:26 +07:00
commit 2a7dfa95c8
67 changed files with 5864 additions and 0 deletions

57
migrations/env.py Normal file
View File

@@ -0,0 +1,57 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config
from glitchup_bot.config import settings
from glitchup_bot.models import Base
# Alembic's Config is backed by ConfigParser, which treats "%" as an
# interpolation marker. A database URL whose password/query string contains
# "%" would raise InterpolationSyntaxError, so escape it before injecting.
config = context.config
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
config.set_main_option("sqlalchemy.url", settings.database_url.replace("%", "%%"))
# Metadata of all project models; used by autogenerate to diff schemas.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    No database connection is made: Alembic is configured with just the URL
    and emits the migration SQL as a script (literal binds inlined).
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection) -> None:
    """Configure Alembic on an open (sync-facade) connection and migrate.

    Passed as the callable to ``AsyncConnection.run_sync`` by
    ``run_async_migrations``.
    """
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Build an async engine from the Alembic config and run migrations.

    Uses NullPool so the one-shot migration connection is not pooled, and
    disposes the engine once migrations finish.
    """
    engine = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with engine.connect() as conn:
        await conn.run_sync(do_run_migrations)
    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode via the async engine path."""
    asyncio.run(run_async_migrations())
# Entry point: Alembic selects offline mode for SQL-script generation
# (e.g. `alembic upgrade --sql`), otherwise we connect and migrate live.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
migrations/script.py.mako Normal file
View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

View File

@@ -0,0 +1,103 @@
"""initial tables
Revision ID: 20260327_0001
Revises:
Create Date: 2026-03-27 00:00:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "20260327_0001"
down_revision: str | None = None  # first migration in the chain
branch_labels: Sequence[str] | None = None
depends_on: Sequence[str] | None = None
def upgrade() -> None:
    """Create the initial tables: issues_cache, notifications_sent, sync_state."""

    def _server_now(name: str) -> sa.Column:
        # Fresh timestamptz column defaulting to now(); Column objects bind
        # to a single table, so a new one is built per call.
        return sa.Column(
            name,
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        )

    # Local cache of GlitchTip issues.
    op.create_table(
        "issues_cache",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("glitchtip_issue_id", sa.BigInteger(), nullable=False),
        sa.Column("project_slug", sa.String(length=255), nullable=False),
        sa.Column("title", sa.Text(), nullable=False),
        sa.Column("culprit", sa.Text(), nullable=True),
        sa.Column("level", sa.String(length=50), nullable=False),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("first_seen", sa.DateTime(timezone=True), nullable=True),
        sa.Column("last_seen", sa.DateTime(timezone=True), nullable=True),
        sa.Column("event_count", sa.Integer(), nullable=False),
        sa.Column("is_regression", sa.Boolean(), nullable=False),
        sa.Column("link", sa.Text(), nullable=True),
        _server_now("created_at"),
        _server_now("updated_at"),
    )
    op.create_index(
        op.f("ix_issues_cache_glitchtip_issue_id"),
        "issues_cache",
        ["glitchtip_issue_id"],
        unique=True,
    )
    op.create_index(
        op.f("ix_issues_cache_project_slug"), "issues_cache", ["project_slug"], unique=False
    )

    # Dedup log of notifications already delivered.
    op.create_table(
        "notifications_sent",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("issue_id", sa.BigInteger(), nullable=False),
        sa.Column("notification_type", sa.String(length=50), nullable=False),
        sa.Column("fingerprint", sa.String(length=255), nullable=False),
        _server_now("sent_at"),
    )
    op.create_index(
        op.f("ix_notifications_sent_fingerprint"),
        "notifications_sent",
        ["fingerprint"],
        unique=False,
    )
    op.create_index(
        op.f("ix_notifications_sent_issue_id"), "notifications_sent", ["issue_id"], unique=False
    )

    # Per-source timestamp of the last successful sync.
    op.create_table(
        "sync_state",
        sa.Column("id", sa.BigInteger(), primary_key=True, autoincrement=True),
        sa.Column("source", sa.String(length=100), nullable=False),
        sa.Column("last_successful_at", sa.DateTime(timezone=True), nullable=True),
        _server_now("updated_at"),
    )
    op.create_index(op.f("ix_sync_state_source"), "sync_state", ["source"], unique=True)
def downgrade() -> None:
    """Drop everything created by upgrade(), indexes before their tables."""
    drops: tuple[tuple[str, tuple[str, ...]], ...] = (
        ("sync_state", ("ix_sync_state_source",)),
        (
            "notifications_sent",
            ("ix_notifications_sent_issue_id", "ix_notifications_sent_fingerprint"),
        ),
        (
            "issues_cache",
            ("ix_issues_cache_project_slug", "ix_issues_cache_glitchtip_issue_id"),
        ),
    )
    for table, indexes in drops:
        for index in indexes:
            op.drop_index(op.f(index), table_name=table)
        op.drop_table(table)

View File

@@ -0,0 +1,162 @@
"""runtime features
Revision ID: 20260327_0002
Revises: 20260327_0001
Create Date: 2026-03-27 00:30:00.000000
"""
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "20260327_0002"
down_revision: str | None = "20260327_0001"  # follows the initial-tables migration
branch_labels: Sequence[str] | None = None
depends_on: Sequence[str] | None = None
def upgrade() -> None:
    """Extend existing tables and add runtime-configuration tables."""

    def _server_now(name: str) -> sa.Column:
        # Fresh timestamptz column defaulting to now(); Column objects bind
        # to a single table, so a new one is built per call.
        return sa.Column(
            name,
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        )

    # New columns on existing tables (nullable / server-defaulted so the
    # ALTERs succeed on populated tables).
    op.add_column("issues_cache", sa.Column("release", sa.String(length=255), nullable=True))
    op.add_column(
        "notifications_sent", sa.Column("project_slug", sa.String(length=255), nullable=True)
    )
    op.add_column(
        "notifications_sent", sa.Column("group_name", sa.String(length=50), nullable=True)
    )
    op.add_column("notifications_sent", sa.Column("priority", sa.String(length=20), nullable=True))
    op.add_column(
        "notifications_sent",
        sa.Column("delivery_status", sa.String(length=50), nullable=False, server_default="sent"),
    )
    op.create_index(
        op.f("ix_notifications_sent_group_name"),
        "notifications_sent",
        ["group_name"],
        unique=False,
    )

    # Pattern-based mute rules.
    op.create_table(
        "mute_rules",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("pattern", sa.Text(), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False, server_default=sa.text("true")),
        _server_now("created_at"),
    )
    op.create_unique_constraint("uq_mute_rules_pattern", "mute_rules", ["pattern"])

    # Project -> group ownership overrides (one group per project).
    op.create_table(
        "project_ownership_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("project_slug", sa.String(length=255), nullable=False),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        _server_now("created_at"),
        _server_now("updated_at"),
    )
    op.create_index(
        op.f("ix_project_ownership_overrides_project_slug"),
        "project_ownership_overrides",
        ["project_slug"],
        unique=True,
    )

    # Group -> topic overrides (one topic per group).
    op.create_table(
        "group_topic_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        sa.Column("topic_id", sa.Integer(), nullable=False),
        _server_now("created_at"),
        _server_now("updated_at"),
    )
    op.create_index(
        op.f("ix_group_topic_overrides_group_name"),
        "group_topic_overrides",
        ["group_name"],
        unique=True,
    )

    # Group subscribers; a user may appear once per group.
    op.create_table(
        "group_subscriber_overrides",
        sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
        sa.Column("group_name", sa.String(length=50), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        _server_now("created_at"),
        sa.UniqueConstraint("group_name", "user_id", name="uq_group_subscriber"),
    )
    for column in ("group_name", "user_id"):
        op.create_index(
            op.f(f"ix_group_subscriber_overrides_{column}"),
            "group_subscriber_overrides",
            [column],
            unique=False,
        )
def downgrade() -> None:
    """Revert upgrade(): drop new tables, then new indexes/columns, in reverse."""
    for index in (
        "ix_group_subscriber_overrides_user_id",
        "ix_group_subscriber_overrides_group_name",
    ):
        op.drop_index(op.f(index), table_name="group_subscriber_overrides")
    op.drop_table("group_subscriber_overrides")

    op.drop_index(op.f("ix_group_topic_overrides_group_name"), table_name="group_topic_overrides")
    op.drop_table("group_topic_overrides")

    op.drop_index(
        op.f("ix_project_ownership_overrides_project_slug"),
        table_name="project_ownership_overrides",
    )
    op.drop_table("project_ownership_overrides")

    op.drop_constraint("uq_mute_rules_pattern", "mute_rules", type_="unique")
    op.drop_table("mute_rules")

    # Finally, strip the columns/index added to the pre-existing tables.
    op.drop_index(op.f("ix_notifications_sent_group_name"), table_name="notifications_sent")
    for column in ("delivery_status", "priority", "group_name", "project_slug"):
        op.drop_column("notifications_sent", column)
    op.drop_column("issues_cache", "release")