Initial commit
This commit is contained in:
0
backend/migrations/__init__.py
Normal file
0
backend/migrations/__init__.py
Normal file
107
backend/migrations/env.py
Normal file
107
backend/migrations/env.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import asyncio
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
|
||||
|
||||
from advanced_alchemy.base import metadata_registry
|
||||
from alembic import context
|
||||
from alembic.autogenerate import rewriter
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sqlalchemy.engine import Connection
|
||||
|
||||
from advanced_alchemy.alembic.commands import AlembicCommandConfig
|
||||
|
||||
__all__ = ("do_run_migrations", "run_migrations_offline", "run_migrations_online")


# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# NOTE: this is advanced_alchemy's AlembicCommandConfig subclass, which carries
# extra attributes (db_url, bind_key, engine, compare_type, ...) read below.
config: "AlembicCommandConfig" = context.config  # type: ignore

# Rewriter post-processes autogenerate directives before they are written to a
# revision file; it is passed as process_revision_directives to configure().
writer = rewriter.Rewriter()
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    The Alembic context is configured with just a URL rather than an Engine,
    so no DBAPI needs to be installed.  Calls to context.execute() emit the
    generated SQL to the script output instead of a live connection.
    """
    configure_kwargs = {
        "url": config.db_url,
        "target_metadata": metadata_registry.get(config.bind_key),
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
        "compare_type": config.compare_type,
        "version_table": config.version_table_name,
        "version_table_pk": config.version_table_pk,
        "user_module_prefix": config.user_module_prefix,
        "render_as_batch": config.render_as_batch,
        # Route autogenerate directives through the module-level rewriter.
        "process_revision_directives": writer,
    }
    context.configure(**configure_kwargs)

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection: "Connection") -> None:
    """Bind *connection* to the Alembic context and run the migrations."""
    migration_options = {
        "connection": connection,
        "target_metadata": metadata_registry.get(config.bind_key),
        "compare_type": config.compare_type,
        "version_table": config.version_table_name,
        "version_table_pk": config.version_table_pk,
        "user_module_prefix": config.user_module_prefix,
        "render_as_batch": config.render_as_batch,
        # Route autogenerate directives through the module-level rewriter.
        "process_revision_directives": writer,
    }
    context.configure(**migration_options)

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
async def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.

    Raises:
        RuntimeError: If the engine cannot be created from the config.
    """
    configuration = config.get_section(config.config_ini_section) or {}
    configuration["sqlalchemy.url"] = config.db_url

    # Prefer an engine supplied by the application (config.engine); otherwise
    # build one from the ini section.  NullPool: a migration run is one-shot,
    # so connection pooling is pointless.
    connectable = cast(
        "AsyncEngine",
        config.engine
        or async_engine_from_config(
            configuration,
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
            future=True,
        ),
    )
    if connectable is None:  # pyright: ignore[reportUnnecessaryComparison]
        msg = "Could not get engine from config. Please ensure your `alembic.ini` according to the official Alembic documentation."
        raise RuntimeError(
            msg,
        )

    # FIX: dispose() previously ran only on the success path; a failing
    # migration left the engine (and its connections) undisposed.  try/finally
    # guarantees cleanup either way.
    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        await connectable.dispose()
|
||||
|
||||
|
||||
# Entry point: Alembic decides offline vs. online mode; the async online path
# is driven to completion with asyncio.run().
if not context.is_offline_mode():
    asyncio.run(run_migrations_online())
else:
    run_migrations_offline()
|
||||
72
backend/migrations/script.py.mako
Normal file
72
backend/migrations/script.py.mako
Normal file
@@ -0,0 +1,72 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
|
||||
import warnings
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC, StoredObject, PasswordHash, FernetBackend
|
||||
from advanced_alchemy.types.encrypted_string import PGCryptoBackend
|
||||
from advanced_alchemy.types.password_hash.argon2 import Argon2Hasher
|
||||
from advanced_alchemy.types.password_hash.passlib import PasslibHasher
|
||||
from advanced_alchemy.types.password_hash.pwdlib import PwdlibHasher
|
||||
from sqlalchemy import Text # noqa: F401
|
||||
${imports if imports else ""}
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]


# Attach the advanced_alchemy custom column types (and hash/crypto backends)
# to the ``sa`` namespace so autogenerated operations in rendered revisions
# can reference them as ``sa.GUID``, ``sa.EncryptedString``, etc. without
# extra imports in each migration file.
sa.GUID = GUID
sa.DateTimeUTC = DateTimeUTC
sa.ORA_JSONB = ORA_JSONB
sa.EncryptedString = EncryptedString
sa.EncryptedText = EncryptedText
sa.StoredObject = StoredObject
sa.PasswordHash = PasswordHash
sa.Argon2Hasher = Argon2Hasher
sa.PasslibHasher = PasslibHasher
sa.PwdlibHasher = PwdlibHasher
sa.FernetBackend = FernetBackend
sa.PGCryptoBackend = PGCryptoBackend

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Entry point for ``alembic upgrade``: schema changes first, then data."""
    with warnings.catch_warnings():
        # Silence UserWarnings emitted during DDL so migration output stays clean.
        warnings.simplefilter("ignore", UserWarning)
        with op.get_context().autocommit_block():
            schema_upgrades()
            data_upgrades()
|
||||
|
||||
def downgrade() -> None:
    """Entry point for ``alembic downgrade``: data first, then schema (reverse of upgrade)."""
    with warnings.catch_warnings():
        # Silence UserWarnings emitted during DDL so migration output stays clean.
        warnings.simplefilter("ignore", UserWarning)
        with op.get_context().autocommit_block():
            data_downgrades()
            schema_downgrades()
|
||||
|
||||
def schema_upgrades() -> None:
    """schema upgrade migrations go here."""
    # Mako substitutes the autogenerated upgrade operations here,
    # falling back to ``pass`` when there are none.
    ${upgrades if upgrades else "pass"}
|
||||
|
||||
def schema_downgrades() -> None:
    """schema downgrade migrations go here."""
    # Mako substitutes the autogenerated downgrade operations here,
    # falling back to ``pass`` when there are none.
    ${downgrades if downgrades else "pass"}
|
||||
|
||||
def data_upgrades() -> None:
    """Add any optional data upgrade migrations here!"""
    # Intentionally empty: hand-written data migrations may be added per revision.
|
||||
|
||||
def data_downgrades() -> None:
    """Add any optional data downgrade migrations here!"""
    # Intentionally empty: hand-written data migrations may be added per revision.
|
||||
@@ -0,0 +1,65 @@
|
||||
"""Add pg_trgm extension
|
||||
|
||||
Revision ID: 26022ec86f32
|
||||
Revises:
|
||||
Create Date: 2025-10-31 18:45:55.027462
|
||||
|
||||
"""
|
||||
|
||||
import warnings
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC, StoredObject, PasswordHash
|
||||
from sqlalchemy import Text # noqa: F401
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]


# Attach the advanced_alchemy custom column types to the ``sa`` namespace so
# autogenerated operations can reference them as ``sa.GUID`` etc. without
# extra imports in this migration file.
sa.GUID = GUID
sa.DateTimeUTC = DateTimeUTC
sa.ORA_JSONB = ORA_JSONB
sa.EncryptedString = EncryptedString
sa.EncryptedText = EncryptedText
sa.StoredObject = StoredObject
sa.PasswordHash = PasswordHash

# revision identifiers, used by Alembic.
revision = '26022ec86f32'
down_revision = None
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Ensure required PostgreSQL extensions exist, then run schema/data steps."""
    # CREATE EXTENSION IF NOT EXISTS is idempotent, so re-running is safe.
    for extension in ("pgcrypto", "pg_trgm"):
        op.execute(sa.text(f'create EXTENSION if not EXISTS "{extension}"'))
    with warnings.catch_warnings():
        # Silence UserWarnings emitted during DDL so migration output stays clean.
        warnings.simplefilter("ignore", UserWarning)
        with op.get_context().autocommit_block():
            schema_upgrades()
            data_upgrades()
|
||||
|
||||
def downgrade() -> None:
    """Run data downgrades, then schema downgrades (reverse order of upgrade).

    NOTE(review): the ``pgcrypto``/``pg_trgm`` extensions created in
    ``upgrade()`` are not dropped here; other database objects may depend on
    them — confirm whether this asymmetry is intended.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=UserWarning)
        with op.get_context().autocommit_block():
            data_downgrades()
            schema_downgrades()
|
||||
|
||||
def schema_upgrades() -> None:
    """Placeholder for schema upgrade operations — none in this revision."""
|
||||
|
||||
def schema_downgrades() -> None:
    """Placeholder for schema downgrade operations — none in this revision."""
|
||||
|
||||
def data_upgrades() -> None:
    """Placeholder for optional data upgrade migrations — none in this revision."""
    return None
|
||||
|
||||
def data_downgrades() -> None:
    """Placeholder for optional data downgrade migrations — none in this revision."""
    return None
|
||||
0
backend/migrations/versions/__init__.py
Normal file
0
backend/migrations/versions/__init__.py
Normal file
Reference in New Issue
Block a user