chore: add migration for initial db tables
This commit is contained in:
1
backend/migrations/README
Normal file
1
backend/migrations/README
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Asynchronous SQLAlchemy configuration with Advanced Alchemy.
|
||||||
@@ -1,12 +1,12 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
from typing import TYPE_CHECKING, cast
|
from typing import TYPE_CHECKING, cast
|
||||||
|
|
||||||
|
from alembic.autogenerate import rewriter
|
||||||
from sqlalchemy import pool
|
from sqlalchemy import pool
|
||||||
from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
|
from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
|
||||||
|
|
||||||
from advanced_alchemy.base import metadata_registry
|
from advanced_alchemy.base import metadata_registry
|
||||||
from alembic import context
|
from alembic import context
|
||||||
from alembic.autogenerate import rewriter
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from sqlalchemy.engine import Connection
|
from sqlalchemy.engine import Connection
|
||||||
|
|||||||
@@ -0,0 +1,222 @@
|
|||||||
|
"""create initial tables
|
||||||
|
|
||||||
|
Revision ID: 43bdf42a4f6c
|
||||||
|
Revises:
|
||||||
|
Create Date: 2026-03-08 16:00:54.727868
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC, StoredObject, PasswordHash, FernetBackend
|
||||||
|
from advanced_alchemy.types.encrypted_string import PGCryptoBackend
|
||||||
|
from advanced_alchemy.types.password_hash.argon2 import Argon2Hasher
|
||||||
|
from advanced_alchemy.types.password_hash.passlib import PasslibHasher
|
||||||
|
from advanced_alchemy.types.password_hash.pwdlib import PwdlibHasher
|
||||||
|
from pwdlib.hashers.argon2 import Argon2Hasher as PwdlibArgon2Hasher
|
||||||
|
from sqlalchemy import Text # noqa: F401
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
|
||||||
|
|
||||||
|
sa.GUID = GUID
|
||||||
|
sa.DateTimeUTC = DateTimeUTC
|
||||||
|
sa.ORA_JSONB = ORA_JSONB
|
||||||
|
sa.EncryptedString = EncryptedString
|
||||||
|
sa.EncryptedText = EncryptedText
|
||||||
|
sa.StoredObject = StoredObject
|
||||||
|
sa.PasswordHash = PasswordHash
|
||||||
|
sa.Argon2Hasher = Argon2Hasher
|
||||||
|
sa.PasslibHasher = PasslibHasher
|
||||||
|
sa.PwdlibHasher = PwdlibHasher
|
||||||
|
sa.FernetBackend = FernetBackend
|
||||||
|
sa.PGCryptoBackend = PGCryptoBackend
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = '43bdf42a4f6c'
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Apply this revision: schema changes first, then data changes."""
    with warnings.catch_warnings():
        # Suppress UserWarnings emitted while the migration runs so the
        # Alembic output stays readable.
        warnings.filterwarnings("ignore", category=UserWarning)
        migration_context = op.get_context()
        with migration_context.autocommit_block():
            schema_upgrades()
            data_upgrades()
def downgrade() -> None:
    """Revert this revision: undo data changes first, then the schema."""
    with warnings.catch_warnings():
        # Suppress UserWarnings emitted while the migration runs so the
        # Alembic output stays readable.
        warnings.filterwarnings("ignore", category=UserWarning)
        migration_context = op.get_context()
        with migration_context.autocommit_block():
            data_downgrades()
            schema_downgrades()
def schema_upgrades() -> None:
    """schema upgrade migrations go here."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): several foreign keys below reference tables this initial
    # revision (Revises: None) never creates — 'libraries', 'publishers',
    # 'book_series', 'authors', 'tags'. Confirm those tables are created by
    # another mechanism before this migration runs, or the FK DDL will fail.

    # Accounts. Passwords are stored via Advanced Alchemy's PasswordHash type
    # backed by pwdlib's Argon2 hasher (see the sa.* aliasing at module level).
    op.create_table('users',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('email', sa.String(), nullable=False),
    sa.Column('password', sa.PasswordHash(backend=sa.PwdlibHasher(PwdlibArgon2Hasher()), length=128), nullable=False),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_users')),
    sa.UniqueConstraint('email', name=op.f('uq_users_email'))
    )
    # User-curated book lists; library_id is optional (a list may span none).
    op.create_table('book_lists',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
    sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('title', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_book_lists_library_id_libraries')),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_lists_user_id_users')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_book_lists'))
    )
    # Core book metadata; publisher/series are optional relationships.
    op.create_table('books',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('title', sa.String(), nullable=False),
    sa.Column('subtitle', sa.String(), nullable=True),
    sa.Column('description', sa.String(), nullable=True),
    sa.Column('published_date', sa.Date(), nullable=True),
    sa.Column('language', sa.String(), nullable=True),
    sa.Column('pages', sa.Integer(), nullable=True),
    sa.Column('cover_image', sa.String(), nullable=True),
    sa.Column('edition', sa.Integer(), nullable=True),
    sa.Column('path', sa.String(), nullable=True),
    sa.Column('publisher_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
    sa.Column('series_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
    sa.Column('series_position', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_books_library_id_libraries')),
    sa.ForeignKeyConstraint(['publisher_id'], ['publishers.id'], name=op.f('fk_books_publisher_id_publishers')),
    sa.ForeignKeyConstraint(['series_id'], ['book_series.id'], name=op.f('fk_books_series_id_book_series')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_books'))
    )
    # GIN trigram index for fuzzy title search; the postgresql_* kwargs are
    # no-ops on other dialects (e.g. the sqlite variant used in testing).
    with op.batch_alter_table('books', schema=None) as batch_op:
        batch_op.create_index('ix_books_title_trigram', ['title'], unique=False, postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})

    # Per-user reader devices identified by a unique API key.
    op.create_table('devices',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('api_key', sa.String(), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_devices_user_id_users')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_devices')),
    sa.UniqueConstraint('api_key', name=op.f('uq_devices_api_key'))
    )
    # Ordered book<->author association ('position' preserves author order);
    # rows are removed automatically when the book is deleted.
    op.create_table('book_author_links',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('author_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['author_id'], ['authors.id'], name=op.f('fk_book_author_links_author_id_authors')),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_author_links_book_id_books'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', 'book_id', 'author_id', name=op.f('pk_book_author_links'))
    )
    # Ordered book<->list association; cascades on book deletion.
    op.create_table('book_list_links',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('list_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_list_links_book_id_books'), ondelete='cascade'),
    sa.ForeignKeyConstraint(['list_id'], ['book_lists.id'], name=op.f('fk_book_list_links_list_id_book_lists')),
    sa.PrimaryKeyConstraint('id', 'book_id', 'list_id', name=op.f('pk_book_list_links'))
    )
    # Per-user reading position; holds format-specific locators (EPUB CFI /
    # XPointer, PDF page) alongside a required overall percentage.
    op.create_table('book_progress',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('epub_cfi', sa.String(), nullable=True),
    sa.Column('epub_xpointer', sa.String(), nullable=True),
    sa.Column('pdf_page', sa.Integer(), nullable=True),
    sa.Column('percentage', sa.Float(), nullable=False),
    sa.Column('completed', sa.Boolean(), nullable=True),
    sa.Column('device', sa.String(), nullable=True),
    sa.Column('device_id', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_progress_book_id_books'), ondelete='cascade'),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_progress_user_id_users'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_book_progress'))
    )
    # Ordered book<->tag association; cascades on book deletion.
    # NOTE(review): named 'book_tag_link' (singular) while the other link
    # tables are plural ('..._links') — confirm this matches the ORM model.
    op.create_table('book_tag_link',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('tag_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_tag_link_book_id_books'), ondelete='cascade'),
    sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], name=op.f('fk_book_tag_link_tag_id_tags')),
    sa.PrimaryKeyConstraint('id', 'book_id', 'tag_id', name=op.f('pk_book_tag_link'))
    )
    # On-disk file records for a book (hash, path, size, optional MIME type).
    op.create_table('file_metadata',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('hash', sa.String(), nullable=False),
    sa.Column('path', sa.String(), nullable=False),
    sa.Column('size', sa.Integer(), nullable=False),
    sa.Column('content_type', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_file_metadata_book_id_books'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_file_metadata'))
    )
    # External identifiers per book (e.g. name/value pairs); the composite PK
    # allows one value per identifier name per book.
    op.create_table('identifiers',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('name', sa.String(), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('value', sa.String(), nullable=False),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_identifiers_book_id_books'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', 'name', 'book_id', name=op.f('pk_identifiers'))
    )
    # Progress records in the KOReader sync (kosync) document format.
    op.create_table('kosync_progress',
    sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
    sa.Column('document', sa.String(), nullable=False),
    sa.Column('progress', sa.String(), nullable=True),
    sa.Column('percentage', sa.Float(), nullable=True),
    sa.Column('device', sa.String(), nullable=True),
    sa.Column('device_id', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
    sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_kosync_progress_book_id_books'), ondelete='cascade'),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_kosync_progress_user_id_users'), ondelete='cascade'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_kosync_progress'))
    )
    # ### end Alembic commands ###
def schema_downgrades() -> None:
    """schema downgrade migrations go here."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the dependent (link/child) tables first so no foreign key ever
    # points at an already-dropped parent. Tuple order mirrors the original
    # autogenerated drop sequence exactly.
    for table_name in (
        'kosync_progress',
        'identifiers',
        'file_metadata',
        'book_tag_link',
        'book_progress',
        'book_list_links',
        'book_author_links',
        'devices',
    ):
        op.drop_table(table_name)

    # Remove the trigram search index before dropping 'books' itself; the
    # postgresql_* kwargs are ignored on non-PostgreSQL dialects.
    with op.batch_alter_table('books', schema=None) as batch_op:
        batch_op.drop_index('ix_books_title_trigram', postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})

    # Finally drop the remaining parent tables, children before parents.
    for table_name in ('books', 'book_lists', 'users'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
def data_upgrades() -> None:
    """Optional data (DML) migrations for this revision; currently a no-op."""
    return None
def data_downgrades() -> None:
    """Optional data (DML) rollback steps for this revision; currently a no-op."""
    return None
Reference in New Issue
Block a user