Compare commits

...

15 Commits

Author SHA1 Message Date
9711c68fbb chore: add migration for initial db tables 2026-03-09 14:28:35 -04:00
20df5ea140 remove python313Full package as it is no longer available in nixos 25.11 2026-03-09 14:26:33 -04:00
2e1d16ef9e refactor: move db and file watcher setup to use async context manager 2026-03-09 14:25:24 -04:00
2de0aac23a chore: update dependencies and ignore new warning in tests
ignore JWT related warning that was introduced when updating
dependencies related to JWT secret size (test value of "secret" is too
short).
2026-03-09 14:21:37 -04:00
10eaf84329 refactor: simplify settings page layout 2026-03-09 14:19:59 -04:00
a3e49dc918 regenerate OpenAPI schema with new KOSync models 2026-03-09 14:19:50 -04:00
3072f72958 feat: add device settings page
- Add device CRUD operations (create, delete, regenerate api keys)
- Add API key visibility toggle and copy button
2026-03-09 14:16:53 -04:00
51c1900d8c feat: add KOSync server
- Add KOSync device management
- Add API key auth middleware for devices to authenticate
- Add KOSync-compatible progress sync endpoints
- Add basic tests for KOSync compatible hashes
2026-03-09 14:11:21 -04:00
20a69de968 feat: add KOReader compatible hash to file metadata
Implement KOReader's partial MD5 algorithm for document identification. This hash allows KOReader devices to match local files with server records for reading progress synchronization (KOSync).
2026-03-09 13:38:07 -04:00
930dbe9ba4 update dependencies
Migrate to the new SvelteKit invalid() API from the deprecated issue() api
for forms using remote functions.
2026-03-08 16:24:19 -04:00
c67ca0e1df Handle optional book.path to support books without files
Books may not have a path (e.g., physical books, metadata-only entries).
Updated path-dependent operations to handle None gracefully:

- get_file: raise ValueError if book has no path
- update_book: skip path relocation if no path exists
- remove_files: skip filesystem cleanup if no path exists

Also fixed _save_book_files return type and removed unused imports.
2026-03-07 12:38:04 -05:00
3a5ea1d158 fix typecheck errors in test_book_service.py 2026-03-07 12:17:29 -05:00
8117f0dbfe rename BookService CRUD overrides to domain-specific methods
The create, update, and delete methods had incompatible signatures
resulting in typecheck errors. Renamed to create_book, update_book, and
delete_books.
2026-03-07 11:58:24 -05:00
67fab3f9c6 fix update progress test to match new progress schema
"progress" field was renamed to "percentage"
2026-03-07 11:44:08 -05:00
a19c944b6e Fix hardcoded absolute paths in book upload tests
The Darwin epub test cases used absolute paths. I hadn't noticed until switching machines, which caused errors in the test harness. Changed to relative paths to be consistent with other test cases.
2026-03-07 11:35:30 -05:00
39 changed files with 3861 additions and 2018 deletions

View File

@@ -0,0 +1 @@
Asynchronous SQLAlchemy configuration with Advanced Alchemy.

View File

@@ -1,12 +1,12 @@
import asyncio import asyncio
from typing import TYPE_CHECKING, cast from typing import TYPE_CHECKING, cast
from alembic.autogenerate import rewriter
from sqlalchemy import pool from sqlalchemy import pool
from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
from advanced_alchemy.base import metadata_registry from advanced_alchemy.base import metadata_registry
from alembic import context from alembic import context
from alembic.autogenerate import rewriter
if TYPE_CHECKING: if TYPE_CHECKING:
from sqlalchemy.engine import Connection from sqlalchemy.engine import Connection

View File

@@ -0,0 +1,222 @@
"""create initial tables
Revision ID: 43bdf42a4f6c
Revises:
Create Date: 2026-03-08 16:00:54.727868
"""
import warnings
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC, StoredObject, PasswordHash, FernetBackend
from advanced_alchemy.types.encrypted_string import PGCryptoBackend
from advanced_alchemy.types.password_hash.argon2 import Argon2Hasher
from advanced_alchemy.types.password_hash.passlib import PasslibHasher
from advanced_alchemy.types.password_hash.pwdlib import PwdlibHasher
from pwdlib.hashers.argon2 import Argon2Hasher as PwdlibArgon2Hasher
from sqlalchemy import Text # noqa: F401
if TYPE_CHECKING:
from collections.abc import Sequence
__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
sa.GUID = GUID
sa.DateTimeUTC = DateTimeUTC
sa.ORA_JSONB = ORA_JSONB
sa.EncryptedString = EncryptedString
sa.EncryptedText = EncryptedText
sa.StoredObject = StoredObject
sa.PasswordHash = PasswordHash
sa.Argon2Hasher = Argon2Hasher
sa.PasslibHasher = PasslibHasher
sa.PwdlibHasher = PwdlibHasher
sa.FernetBackend = FernetBackend
sa.PGCryptoBackend = PGCryptoBackend
# revision identifiers, used by Alembic.
revision = '43bdf42a4f6c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=UserWarning)
with op.get_context().autocommit_block():
schema_upgrades()
data_upgrades()
def downgrade() -> None:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=UserWarning)
with op.get_context().autocommit_block():
data_downgrades()
schema_downgrades()
def schema_upgrades() -> None:
"""schema upgrade migrations go here."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('email', sa.String(), nullable=False),
sa.Column('password', sa.PasswordHash(backend=sa.PwdlibHasher(PwdlibArgon2Hasher()), length=128), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_users')),
sa.UniqueConstraint('email', name=op.f('uq_users_email'))
)
op.create_table('book_lists',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_book_lists_library_id_libraries')),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_lists_user_id_users')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_book_lists'))
)
op.create_table('books',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('subtitle', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('published_date', sa.Date(), nullable=True),
sa.Column('language', sa.String(), nullable=True),
sa.Column('pages', sa.Integer(), nullable=True),
sa.Column('cover_image', sa.String(), nullable=True),
sa.Column('edition', sa.Integer(), nullable=True),
sa.Column('path', sa.String(), nullable=True),
sa.Column('publisher_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('series_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('series_position', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_books_library_id_libraries')),
sa.ForeignKeyConstraint(['publisher_id'], ['publishers.id'], name=op.f('fk_books_publisher_id_publishers')),
sa.ForeignKeyConstraint(['series_id'], ['book_series.id'], name=op.f('fk_books_series_id_book_series')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_books'))
)
with op.batch_alter_table('books', schema=None) as batch_op:
batch_op.create_index('ix_books_title_trigram', ['title'], unique=False, postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})
op.create_table('devices',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('api_key', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_devices_user_id_users')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_devices')),
sa.UniqueConstraint('api_key', name=op.f('uq_devices_api_key'))
)
op.create_table('book_author_links',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('author_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['author_id'], ['authors.id'], name=op.f('fk_book_author_links_author_id_authors')),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_author_links_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', 'book_id', 'author_id', name=op.f('pk_book_author_links'))
)
op.create_table('book_list_links',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('list_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_list_links_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['list_id'], ['book_lists.id'], name=op.f('fk_book_list_links_list_id_book_lists')),
sa.PrimaryKeyConstraint('id', 'book_id', 'list_id', name=op.f('pk_book_list_links'))
)
op.create_table('book_progress',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('epub_cfi', sa.String(), nullable=True),
sa.Column('epub_xpointer', sa.String(), nullable=True),
sa.Column('pdf_page', sa.Integer(), nullable=True),
sa.Column('percentage', sa.Float(), nullable=False),
sa.Column('completed', sa.Boolean(), nullable=True),
sa.Column('device', sa.String(), nullable=True),
sa.Column('device_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_progress_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_progress_user_id_users'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_book_progress'))
)
op.create_table('book_tag_link',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('tag_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_tag_link_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], name=op.f('fk_book_tag_link_tag_id_tags')),
sa.PrimaryKeyConstraint('id', 'book_id', 'tag_id', name=op.f('pk_book_tag_link'))
)
op.create_table('file_metadata',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('hash', sa.String(), nullable=False),
sa.Column('path', sa.String(), nullable=False),
sa.Column('size', sa.Integer(), nullable=False),
sa.Column('content_type', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_file_metadata_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_file_metadata'))
)
op.create_table('identifiers',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('value', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_identifiers_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', 'name', 'book_id', name=op.f('pk_identifiers'))
)
op.create_table('kosync_progress',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('document', sa.String(), nullable=False),
sa.Column('progress', sa.String(), nullable=True),
sa.Column('percentage', sa.Float(), nullable=True),
sa.Column('device', sa.String(), nullable=True),
sa.Column('device_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_kosync_progress_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_kosync_progress_user_id_users'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_kosync_progress'))
)
# ### end Alembic commands ###
def schema_downgrades() -> None:
"""schema downgrade migrations go here."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('kosync_progress')
op.drop_table('identifiers')
op.drop_table('file_metadata')
op.drop_table('book_tag_link')
op.drop_table('book_progress')
op.drop_table('book_list_links')
op.drop_table('book_author_links')
op.drop_table('devices')
with op.batch_alter_table('books', schema=None) as batch_op:
batch_op.drop_index('ix_books_title_trigram', postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})
op.drop_table('books')
op.drop_table('book_lists')
op.drop_table('users')
# ### end Alembic commands ###
def data_upgrades() -> None:
"""Add any optional data upgrade migrations here!"""
def data_downgrades() -> None:
"""Add any optional data downgrade migrations here!"""

View File

@@ -8,7 +8,7 @@ authors = [
] ]
requires-python = ">=3.13" requires-python = ">=3.13"
dependencies = [ dependencies = [
"advanced-alchemy==1.8.0", "advanced-alchemy>=1.8.0",
"aiofiles>=24.1.0", "aiofiles>=24.1.0",
"asyncpg>=0.30.0", "asyncpg>=0.30.0",
"ebooklib>=0.19", "ebooklib>=0.19",
@@ -41,3 +41,6 @@ dev = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
asyncio_mode = "auto" asyncio_mode = "auto"
testpaths = ["tests"] testpaths = ["tests"]
filterwarnings = [
"ignore::jwt.warnings.InsecureKeyLengthWarning",
]

View File

@@ -3,7 +3,6 @@
pkgs.mkShell { pkgs.mkShell {
buildInputs = with pkgs; [ buildInputs = with pkgs; [
# Python development environment for Chitai # Python development environment for Chitai
python313Full
python313Packages.greenlet python313Packages.greenlet
python313Packages.ruff python313Packages.ruff
uv uv

View File

@@ -1,5 +1,6 @@
import asyncio import asyncio
from typing import Any from contextlib import asynccontextmanager
from typing import Any, AsyncGenerator
from chitai.services.book import BookService from chitai.services.book import BookService
from chitai.services.consume import ConsumeDirectoryWatcher from chitai.services.consume import ConsumeDirectoryWatcher
@@ -63,14 +64,15 @@ oauth2_auth = OAuth2PasswordBearerAuth[User](
"/access/signup", "/access/signup",
"/opds", "/opds",
"/schema", "/schema",
"/syncs",
"/users/auth",
], ],
) )
watcher_task: asyncio.Task watcher_task: asyncio.Task
async def startup(): @asynccontextmanager
"""Run setup.""" async def setup_db_connection(app: Litestar) -> AsyncGenerator[None, None]:
# Setup database # Setup database
async with settings.alchemy_config.get_session() as db_session: async with settings.alchemy_config.get_session() as db_session:
# Create default library if none exist # Create default library if none exist
@@ -86,21 +88,30 @@ async def startup():
) )
await db_session.commit() await db_session.commit()
# book_service = BookService(session=db_session) try:
yield
finally:
await db_session.aclose()
@asynccontextmanager
async def setup_directory_watcher(app: Litestar) -> AsyncGenerator[None, None]:
# Create book covers directory if it does not exist # Create book covers directory if it does not exist
await create_directory(settings.book_cover_path) await create_directory(settings.book_cover_path)
# Create consume directory # Create consume directory
await create_directory(settings.consume_path) await create_directory(settings.consume_path)
async with settings.alchemy_config.get_session() as db_session:
book_service = BookService(session=db_session)
library_service = LibraryService(session=db_session)
# file_watcher = ConsumeDirectoryWatcher(settings.consume_path, library_service, book_service) file_watcher = ConsumeDirectoryWatcher(settings.consume_path, library_service, book_service)
# watcher_task = asyncio.create_task(file_watcher.init_watcher()) watcher_task = asyncio.create_task(file_watcher.init_watcher())
async def shutdown():
""" Run shutdown tasks. """
try:
yield
finally:
watcher_task.cancel() watcher_task.cancel()
def create_app() -> Litestar: def create_app() -> Litestar:
@@ -114,12 +125,14 @@ def create_app() -> Litestar:
c.PublisherController, c.PublisherController,
c.TagController, c.TagController,
c.OpdsController, c.OpdsController,
c.DeviceController,
c.KosyncController,
create_static_files_router(path="/covers", directories=["./covers"]), create_static_files_router(path="/covers", directories=["./covers"]),
index, index,
healthcheck, healthcheck,
], ],
exception_handlers=exception_handlers, exception_handlers=exception_handlers,
on_startup=[startup], lifespan=[setup_db_connection, setup_directory_watcher],
plugins=[alchemy], plugins=[alchemy],
on_app_init=[oauth2_auth.on_app_init], on_app_init=[oauth2_auth.on_app_init],
openapi_config=OpenAPIConfig( openapi_config=OpenAPIConfig(

View File

@@ -6,3 +6,5 @@ from .author import AuthorController
from .tag import TagController from .tag import TagController
from .publisher import PublisherController from .publisher import PublisherController
from .opds import OpdsController from .opds import OpdsController
from .kosync_device import DeviceController
from .kosync_progress import KosyncController

View File

@@ -85,7 +85,7 @@ class BookController(Controller):
""" """
result = await books_service.create(data, library) result = await books_service.create_book(data, library)
book = await books_service.get(result.id) book = await books_service.get(result.id)
return books_service.to_schema(book, schema_type=s.BookRead) return books_service.to_schema(book, schema_type=s.BookRead)
@@ -213,7 +213,7 @@ class BookController(Controller):
Returns: Returns:
The updated book as a BookRead schema. The updated book as a BookRead schema.
""" """
await books_service.update(book_id, data, library) await books_service.update_book(book_id, data, library)
book = await books_service.get(book_id) book = await books_service.get(book_id)
return books_service.to_schema(book, schema_type=s.BookRead) return books_service.to_schema(book, schema_type=s.BookRead)
@@ -244,7 +244,7 @@ class BookController(Controller):
The updated book as a BookRead schema. The updated book as a BookRead schema.
""" """
await books_service.update(book_id, {"cover_image": data}, library) await books_service.update_book(book_id, {"cover_image": data}, library)
updated_book = await books_service.get(book_id) updated_book = await books_service.get(book_id)
return books_service.to_schema(updated_book, schema_type=s.BookRead) return books_service.to_schema(updated_book, schema_type=s.BookRead)
@@ -386,7 +386,7 @@ class BookController(Controller):
""" """
await books_service.delete(book_ids, library, delete_files=delete_files) await books_service.delete_books(book_ids, library, delete_files=delete_files)
@post(path="/progress/{book_id:int}") @post(path="/progress/{book_id:int}")
async def update_progress( async def update_progress(

View File

@@ -0,0 +1,54 @@
from advanced_alchemy.service.pagination import OffsetPagination
from chitai.database.models.kosync_device import KosyncDevice
from chitai.database.models.user import User
from chitai.schemas.kosync import KosyncDeviceCreate, KosyncDeviceRead
from chitai.services.kosync_device import KosyncDeviceService
from litestar import Controller, post, get, delete
from litestar.di import Provide
from chitai.services import dependencies as deps
class DeviceController(Controller):
""" Controller for managing KOReader devices."""
dependencies = {
"device_service": Provide(deps.provide_kosync_device_service)
}
path = "/devices"
@get()
async def get_devices(self, device_service: KosyncDeviceService, current_user: User) -> OffsetPagination[KosyncDeviceRead]:
""" Return a list of all the user's devices."""
devices = await device_service.list(
KosyncDevice.user_id == current_user.id
)
return device_service.to_schema(devices, schema_type=KosyncDeviceRead)
@post()
async def create_device(self, data: KosyncDeviceCreate, device_service: KosyncDeviceService, current_user: User) -> KosyncDeviceRead:
device = await device_service.create({
'name': data.name,
'user_id': current_user.id
})
return device_service.to_schema(device, schema_type=KosyncDeviceRead)
@delete("/{device_id:int}")
async def delete_device(self, device_id: int, device_service: KosyncDeviceService, current_user: User) -> None:
# Ensure the device exists and is owned by the user
device = await device_service.get_one(
KosyncDevice.id == device_id,
KosyncDevice.user_id == current_user.id
)
await device_service.delete(device.id)
@get("/{device_id:int}/regenerate")
async def regenerate_device_api_key(self, device_id: int, device_service: KosyncDeviceService, current_user: User) -> KosyncDeviceRead:
# Ensure the device exists and is owned by the user
device = await device_service.get_one(
KosyncDevice.id == device_id,
KosyncDevice.user_id == current_user.id
)
updated_device = await device_service.regenerate_api_key(device.id)
return device_service.to_schema(updated_device, schema_type=KosyncDeviceRead)

View File

@@ -0,0 +1,90 @@
from __future__ import annotations
from typing import Annotated
from litestar import Controller, get, put
from litestar.exceptions import HTTPException
from litestar.status_codes import HTTP_403_FORBIDDEN
from litestar.response import Response
from litestar.params import Parameter
from litestar.di import Provide
from chitai.database import models as m
from chitai.schemas.kosync import KosyncProgressUpdate, KosyncProgressRead
from chitai.services.book import BookService
from chitai.services.kosync_progress import KosyncProgressService
from chitai.services.filters.book import FileHashFilter
from chitai.services import dependencies as deps
from chitai.middleware.kosync_auth import kosync_api_key_auth
class KosyncController(Controller):
"""Controller for syncing progress with KOReader devices."""
middleware = [kosync_api_key_auth]
dependencies = {
"kosync_progress_service": Provide(deps.provide_kosync_progress_service),
"book_service": Provide(deps.provide_book_service),
"user": Provide(deps.provide_user_via_kosync_auth),
}
@put("/syncs/progress")
async def upload_progress(
self,
data: KosyncProgressUpdate,
book_service: BookService,
kosync_progress_service: KosyncProgressService,
user: m.User,
) -> None:
"""Upload book progress from a KOReader device."""
book = await book_service.get_one(FileHashFilter([data.document]))
await kosync_progress_service.upsert_progress(
user_id=user.id,
book_id=book.id,
document=data.document,
progress=data.progress,
percentage=data.percentage,
device=data.device,
device_id=data.device_id,
)
@get("/syncs/progress/{document_id:str}")
async def get_progress(
self,
document_id: str,
kosync_progress_service: KosyncProgressService,
user: m.User,
) -> KosyncProgressRead:
"""Return the Kosync progress record associated with the given document."""
progress = await kosync_progress_service.get_by_document_hash(user.id, document_id)
if not progress:
raise HTTPException(status_code=404, detail="No progress found for document")
return KosyncProgressRead(
document=progress.document,
progress=progress.progress,
percentage=progress.percentage,
device=progress.device,
device_id=progress.device_id,
)
@get("/users/auth")
async def authorize(
self, _api_key: Annotated[str, Parameter(header="X-AUTH-USER")]
) -> Response[dict[str, str]]:
"""Verify authentication (handled by middleware)."""
return Response(status_code=200, content={"authorized": "OK"})
@get("/users/register")
async def register(self) -> None:
"""User registration endpoint - disabled."""
raise HTTPException(
detail="User accounts must be created via the main application",
status_code=HTTP_403_FORBIDDEN,
)

View File

@@ -3,6 +3,8 @@ from .book import Book, Identifier, FileMetadata
from .book_list import BookList, BookListLink from .book_list import BookList, BookListLink
from .book_progress import BookProgress from .book_progress import BookProgress
from .book_series import BookSeries from .book_series import BookSeries
from .kosync_device import KosyncDevice
from .kosync_progress import KosyncProgress
from .library import Library from .library import Library
from .publisher import Publisher from .publisher import Publisher
from .tag import Tag, BookTagLink from .tag import Tag, BookTagLink

View File

@@ -0,0 +1,15 @@
from sqlalchemy import ColumnElement, ForeignKey
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import mapped_column
from advanced_alchemy.base import BigIntAuditBase
class KosyncDevice(BigIntAuditBase):
__tablename__ = "devices"
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
api_key: Mapped[str] = mapped_column(unique=True)
name: Mapped[str]
def __repr__(self) -> str:
return f"KosyncDevice({self.name!r})"

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from typing import Optional
from sqlalchemy import ForeignKey
from sqlalchemy.orm import Mapped, mapped_column
from advanced_alchemy.base import BigIntAuditBase
class KosyncProgress(BigIntAuditBase):
"""Progress tracking for KOReader devices, keyed by document hash."""
__tablename__ = "kosync_progress"
user_id: Mapped[int] = mapped_column(
ForeignKey("users.id", ondelete="cascade"), nullable=False
)
book_id: Mapped[int] = mapped_column(
ForeignKey("books.id", ondelete="cascade"), nullable=False
)
document: Mapped[str] = mapped_column(nullable=False)
progress: Mapped[Optional[str]]
percentage: Mapped[Optional[float]]
device: Mapped[Optional[str]]
device_id: Mapped[Optional[str]]

View File

@@ -0,0 +1,37 @@
import logging

from litestar.connection import ASGIConnection
from litestar.exceptions import NotAuthorizedException, PermissionDeniedException
from litestar.middleware import (
    AbstractAuthenticationMiddleware,
    AuthenticationResult,
    DefineMiddleware
)

from chitai.config import settings
from chitai.services.kosync_device import KosyncDeviceService
from chitai.services.user import UserService
class KosyncAuthenticationMiddleware(AbstractAuthenticationMiddleware):
async def authenticate_request(self, connection: ASGIConnection) -> AuthenticationResult:
"""Given a request, parse the header for Base64 encoded basic auth credentials. """
# retrieve the auth header
api_key = connection.headers.get("X-AUTH-USER", None)
if not api_key:
raise NotAuthorizedException()
try:
db_session = settings.alchemy_config.provide_session(connection.app.state, connection.scope)
user_service = UserService(db_session)
device_service = KosyncDeviceService(db_session)
device = await device_service.get_by_api_key(api_key)
user = await user_service.get(device.user_id)
return AuthenticationResult(user=user, auth=None)
except PermissionDeniedException as exc:
print(exc)
raise NotAuthorizedException()
kosync_api_key_auth = DefineMiddleware(KosyncAuthenticationMiddleware)

View File

@@ -0,0 +1,36 @@
from pydantic import BaseModel
class KosyncProgressUpdate(BaseModel):
"""Schema for uploading progress from KOReader."""
document: str
progress: str | None = None
percentage: float
device: str | None = None
device_id: str | None = None
class KosyncProgressRead(BaseModel):
"""Schema for reading progress to KOReader."""
document: str
progress: str | None = None
percentage: float | None = None
device: str | None = None
device_id: str | None = None
class KosyncDeviceRead(BaseModel):
"""Schema for reading device information."""
id: int
user_id: int
api_key: str
name: str
class KosyncDeviceCreate(BaseModel):
"""Schema for creating a new device."""
name: str

View File

@@ -6,3 +6,5 @@ from .author import AuthorService
from .tag import TagService from .tag import TagService
from .publisher import PublisherService from .publisher import PublisherService
from .book_progress import BookProgressService from .book_progress import BookProgressService
from .kosync_device import KosyncDeviceService
from .kosync_progress import KosyncProgressService

View File

@@ -17,6 +17,7 @@ from advanced_alchemy.service import (
SQLAlchemyAsyncRepositoryService, SQLAlchemyAsyncRepositoryService,
ModelDictT, ModelDictT,
is_dict, is_dict,
schema_dump
) )
from advanced_alchemy.repository import SQLAlchemyAsyncRepository from advanced_alchemy.repository import SQLAlchemyAsyncRepository
from advanced_alchemy.filters import CollectionFilter from advanced_alchemy.filters import CollectionFilter
@@ -37,21 +38,19 @@ from chitai.database.models import (
BookSeries, BookSeries,
FileMetadata, FileMetadata,
Identifier, Identifier,
BookList,
Library, Library,
) )
from chitai.database.models.book_progress import BookProgress
from chitai.schemas.book import BooksCreateFromFiles from chitai.schemas.book import BooksCreateFromFiles
from chitai.services.filesystem_library import BookPathGenerator from chitai.services.filesystem_library import BookPathGenerator
from chitai.services.metadata_extractor import Extractor as MetadataExtractor from chitai.services.metadata_extractor import Extractor as MetadataExtractor
from chitai.services.utils import ( from chitai.services.utils import (
calculate_koreader_hash,
cleanup_empty_parent_directories, cleanup_empty_parent_directories,
delete_directory,
delete_file, delete_file,
is_empty,
move_dir_contents, move_dir_contents,
move_file, move_file,
save_image, save_image,
StreamingHasher,
) )
@@ -67,7 +66,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
async def create(self, data: ModelDictT[Book], library: Library, **kwargs) -> Book: async def create_book(self, data: ModelDictT[Book], library: Library, **kwargs) -> Book:
""" """
Create a new book entity. Create a new book entity.
@@ -83,9 +82,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
Returns: Returns:
The created Book entity. The created Book entity.
""" """
if not is_dict(data): data = schema_dump(data)
data = data.model_dump()
await self._parse_metadata_from_files(data) await self._parse_metadata_from_files(data)
await self._save_cover_image(data) await self._save_cover_image(data)
@@ -126,7 +123,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
books[filepath].append(file) books[filepath].append(file)
return [ return [
await self.create( await self.create_book(
{"files": [file for file in files], "library_id": library.id}, {"files": [file for file in files], "library_id": library.id},
library, library,
**kwargs, **kwargs,
@@ -177,10 +174,10 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
file_metadata = [] file_metadata = []
for file in files: for file in files:
stats = await aios.stat(file) stats = await aios.stat(file)
file_size = stats.st_size file_size = stats.st_size
content_type, _ = mimetypes.guess_type(file) content_type, _ = mimetypes.guess_type(file)
file_hash = await calculate_koreader_hash(file)
filename = path_gen.generate_filename(data, Path(file.name)) filename = path_gen.generate_filename(data, Path(file.name))
@@ -188,7 +185,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
FileMetadata( FileMetadata(
path=str(filename), path=str(filename),
size=file_size, size=file_size,
hash="stub-hash", # TODO: implement file hashing to catch duplicates hash=file_hash,
content_type=content_type, content_type=content_type,
) )
) )
@@ -211,7 +208,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
return books return books
async def delete( async def delete_books(
self, self,
book_ids: list[int], book_ids: list[int],
library: Library, library: Library,
@@ -263,6 +260,9 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
ValueError: If the file is missing or not found for the given book. ValueError: If the file is missing or not found for the given book.
""" """
book = await self.get(book_id) book = await self.get(book_id)
if book.path is None:
raise ValueError("Cannot download file: book has no path")
for file in book.files: for file in book.files:
if file.id != file_id: if file.id != file_id:
continue continue
@@ -309,7 +309,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
break break
yield chunk yield chunk
async def update( async def update_book(
self, book_id: int, update_data: ModelDictT[Book], library: Library self, book_id: int, update_data: ModelDictT[Book], library: Library
) -> Book: ) -> Book:
""" """
@@ -343,7 +343,8 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
await self._save_cover_image(data) await self._save_cover_image(data)
# TODO: extract out into its own function _update_book_path # TODO: extract out into its own function _update_book_path
# Check if file path must be updated # Check if file path must be updated (only for books with files)
if book.path is not None:
path_gen = BookPathGenerator(library.root_path) path_gen = BookPathGenerator(library.root_path)
updated_path = path_gen.generate_path(book.to_dict() | data) updated_path = path_gen.generate_path(book.to_dict() | data)
if str(updated_path) != book.path: if str(updated_path) != book.path:
@@ -368,9 +369,9 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
book = await self.get(book_id) book = await self.get(book_id)
data = book.to_dict() data = book.to_dict()
data["files"] = files data["files"] = files
await self._save_book_files(library, data) new_files = await self._save_book_files(library, data)
book.files.extend(data["files"]) book.files.extend(new_files)
await self.update(book.id, {"files": [file for file in book.files]}, library) await self.update_book(book.id, {"files": [file for file in book.files]}, library)
async def remove_files( async def remove_files(
self, book_id: int, file_ids: list[int], delete_files: bool, library: Library self, book_id: int, file_ids: list[int], delete_files: bool, library: Library
@@ -388,7 +389,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
""" """
book = await self.get_one(Book.id == book_id, Book.library_id == library.id) book = await self.get_one(Book.id == book_id, Book.library_id == library.id)
if delete_files: if delete_files and book.path is not None:
# TODO: Extract this out into its own function # TODO: Extract this out into its own function
for file in (file for file in book.files if file.id in file_ids): for file in (file for file in book.files if file.id in file_ids):
full_path = Path(book.path) / Path(file.path) full_path = Path(book.path) / Path(file.path)
@@ -447,7 +448,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
return data return data
async def _populate_with_unique_relationships(self, data: ModelDictT[Book]): async def _populate_with_unique_relationships(self, data: ModelDictT[Book]) -> ModelDictT[Book]:
""" """
Ensure relationship entities (authors, series, tags, etc.) are unique in the database. Ensure relationship entities (authors, series, tags, etc.) are unique in the database.
@@ -509,7 +510,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
return model_data return model_data
async def _save_book_files(self, library: Library, data: dict) -> dict: async def _save_book_files(self, library: Library, data: dict) -> list[FileMetadata]:
""" """
Save uploaded book files to the filesystem. Save uploaded book files to the filesystem.
@@ -541,10 +542,13 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
await file.seek(0) await file.seek(0)
path = parent / filename path = parent / filename
path.parent.mkdir(parents=True, exist_ok=True) path.parent.mkdir(parents=True, exist_ok=True)
hasher = StreamingHasher()
async with aiofiles.open(path, "wb") as dest: async with aiofiles.open(path, "wb") as dest:
# Read spooled file and save it to the local filesystem # Read spooled file and save it to the local filesystem
while chunk := await file.read(CHUNK_SIZE): while chunk := await file.read(CHUNK_SIZE):
await dest.write(chunk) await dest.write(chunk)
hasher.update(chunk)
stats = await aios.stat(path) stats = await aios.stat(path)
file_size = stats.st_size file_size = stats.st_size
@@ -553,13 +557,13 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
FileMetadata( FileMetadata(
path=str(filename), path=str(filename),
size=file_size, size=file_size,
hash="stub-hash", # TODO: implement file hashing to catch duplicates hash=hasher.hexdigest(),
content_type=file.content_type, content_type=file.content_type,
) )
) )
data["files"] = file_metadata data["files"] = file_metadata
return data return data["files"]
async def _parse_metadata_from_files(self, data: dict, root_path: Path | None = None) -> dict: async def _parse_metadata_from_files(self, data: dict, root_path: Path | None = None) -> dict:
""" """

View File

@@ -36,6 +36,8 @@ from chitai.services import (
TagService, TagService,
AuthorService, AuthorService,
PublisherService, PublisherService,
KosyncDeviceService,
KosyncProgressService,
) )
from chitai.config import settings from chitai.config import settings
from chitai.services.filters.book import ( from chitai.services.filters.book import (
@@ -340,3 +342,12 @@ def provide_optional_user(request: Request[m.User, Token, Any]) -> m.User | None
async def provide_user_via_basic_auth(request: Request[m.User, None, Any]) -> m.User: async def provide_user_via_basic_auth(request: Request[m.User, None, Any]) -> m.User:
return request.user return request.user
async def provide_user_via_kosync_auth(request: Request[m.User, None, Any]) -> m.User:
return request.user
provide_kosync_device_service = create_service_provider(KosyncDeviceService)
provide_kosync_progress_service = create_service_provider(KosyncProgressService)

View File

@@ -138,7 +138,7 @@ class ProgressFilter(StatementFilter):
m.BookProgress.completed == False, m.BookProgress.completed == False,
m.BookProgress.completed.is_(None), m.BookProgress.completed.is_(None),
), ),
m.BookProgress.progress > 0, m.BookProgress.percentage > 0,
) )
) )
@@ -154,7 +154,6 @@ class ProgressFilter(StatementFilter):
@dataclass @dataclass
class FileFilter(StatementFilter): class FileFilter(StatementFilter):
"""Filter books that are related to the given files.""" """Filter books that are related to the given files."""
file_ids: list[int] file_ids: list[int]
def append_to_statement( def append_to_statement(
@@ -166,6 +165,16 @@ class FileFilter(StatementFilter):
return super().append_to_statement(statement, model, *args, **kwargs) return super().append_to_statement(statement, model, *args, **kwargs)
@dataclass
class FileHashFilter(StatementFilter):
    """Filter books that have at least one file whose stored hash matches.

    Matching is done against ``FileMetadata.hash`` via an EXISTS subquery on
    the book's files relationship.
    """

    # Hash values to match against FileMetadata.hash (KOReader partial-MD5
    # digests, given how file hashes are generated elsewhere in this change —
    # confirm against FileMetadata producers).
    file_hashes: list[str]

    def append_to_statement(self, statement: StatementTypeT, model: type[ModelT], *args, **kwargs) -> StatementTypeT:
        # Keep only books where ANY related file's hash is in the given list.
        statement = statement.where(
            m.Book.files.any(m.FileMetadata.hash.in_(self.file_hashes))
        )
        return super().append_to_statement(statement, model, *args, **kwargs)
@dataclass @dataclass
class CustomOrderBy(StatementFilter): class CustomOrderBy(StatementFilter):

View File

@@ -0,0 +1,35 @@
from __future__ import annotations
import secrets
from chitai.database.models.kosync_device import KosyncDevice
from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, ModelDictT, schema_dump
from advanced_alchemy.repository import SQLAlchemyAsyncRepository
class KosyncDeviceService(SQLAlchemyAsyncRepositoryService[KosyncDevice]):
    """Service layer for KOReader device records."""

    # Number of random bytes per API key; token_hex yields twice as many hex chars.
    API_KEY_LENGTH_IN_BYTES = 8

    class Repo(SQLAlchemyAsyncRepository[KosyncDevice]):
        """Repository for KosyncDevice entities."""

        model_type = KosyncDevice

    repository_type = Repo

    async def create(self, data: ModelDictT[KosyncDevice], **kwargs) -> KosyncDevice:
        """Create a device, assigning it a freshly generated API key."""
        payload = schema_dump(data)
        payload["api_key"] = self._generate_api_key()
        return await super().create(payload, **kwargs)

    async def get_by_api_key(self, api_key: str) -> KosyncDevice:
        """Return the device that owns the given API key."""
        return await self.get_one(KosyncDevice.api_key == api_key)

    async def regenerate_api_key(self, device_id: int) -> KosyncDevice:
        """Replace the device's API key with a new random value and persist it."""
        device = await self.get(device_id)
        device.api_key = self._generate_api_key()
        return await self.update(device)

    def _generate_api_key(self) -> str:
        """Return a cryptographically random hex string for use as an API key."""
        return secrets.token_hex(self.API_KEY_LENGTH_IN_BYTES)

View File

@@ -0,0 +1,51 @@
from __future__ import annotations
from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService
from advanced_alchemy.repository import SQLAlchemyAsyncRepository
from chitai.database.models.kosync_progress import KosyncProgress
class KosyncProgressService(SQLAlchemyAsyncRepositoryService[KosyncProgress]):
    """Service layer for KOReader reading-progress records."""

    class Repo(SQLAlchemyAsyncRepository[KosyncProgress]):
        """Repository for KosyncProgress entities."""

        model_type = KosyncProgress

    repository_type = Repo

    async def get_by_document_hash(self, user_id: int, document: str) -> KosyncProgress | None:
        """Return the user's progress row for *document*, or None when absent."""
        return await self.get_one_or_none(
            KosyncProgress.user_id == user_id,
            KosyncProgress.document == document,
        )

    async def upsert_progress(
        self,
        user_id: int,
        book_id: int,
        document: str,
        progress: str | None,
        percentage: float,
        device: str | None = None,
        device_id: str | None = None,
    ) -> KosyncProgress:
        """Create or update the progress record for a (user, document) pair."""
        values = dict(
            user_id=user_id,
            book_id=book_id,
            document=document,
            progress=progress,
            percentage=percentage,
            device=device,
            device_id=device_id,
        )
        existing = await self.get_by_document_hash(user_id, document)
        if existing is None:
            return await self.create(values)
        return await self.update(values, item_id=existing.id)

View File

@@ -1,9 +1,15 @@
# src/chitai/services/utils.py # src/chitai/services/utils.py
# Standard library # Standard library
from __future__ import annotations
import hashlib
from pathlib import Path from pathlib import Path
import shutil import shutil
from typing import BinaryIO from typing import TYPE_CHECKING, BinaryIO
if TYPE_CHECKING:
from hashlib import _Hash
# Third-party libraries # Third-party libraries
import PIL import PIL
@@ -12,6 +18,120 @@ import aiofiles
import aiofiles.os as aios import aiofiles.os as aios
from litestar.datastructures import UploadFile from litestar.datastructures import UploadFile
##################################
# KOReader file hash utilities #
##################################
# KOReader partial MD5 constants
# These match KOReader's partial MD5 implementation for document identification
# KOReader samples 1024 bytes at specific offsets calculated using 32-bit left shift.
# The shift wrapping behavior (shift & 0x1F) causes i=-1 to produce offset 0.
# Offsets: 0, 1024, 4096, 16384, 65536, 262144, 1048576, ...
KO_STEP = 1024
KO_SAMPLE_SIZE = 1024
KO_INDICES = range(-1, 11) # -1 to 10 inclusive
def _lshift32(val: int, shift: int) -> int:
"""
32-bit left shift matching LuaJIT's bit.lshift behavior.
LuaJIT masks the shift amount to 5 bits (0-31) and performs 32-bit arithmetic.
This causes negative shifts to wrap: shift=-2 becomes shift=30, and
1024 << 30 overflows 32 bits to produce 0.
"""
val &= 0xFFFFFFFF
shift &= 0x1F
return (val << shift) & 0xFFFFFFFF
def _get_koreader_offsets() -> list[int]:
"""Get all KOReader sampling offsets."""
return [_lshift32(KO_STEP, 2 * i) for i in KO_INDICES]
def _partial_md5_from_chunk(
chunk: bytes,
hasher: hashlib._Hash,
offsets: list[int],
chunk_start: int,
) -> None:
"""
Update partial MD5 hasher with sampled bytes from a chunk.
KOReader samples 1024 bytes at specific offsets rather than hashing
the entire file. This function checks if any sampling offsets fall
within the current chunk and updates the hasher with those bytes.
Args:
chunk: The current chunk of file data.
hasher: The MD5 hasher to update.
offsets: List of byte offsets to sample from the file.
chunk_start: The starting byte position of this chunk in the file.
"""
chunk_len = len(chunk)
for offset in offsets:
if chunk_start <= offset < chunk_start + chunk_len:
start = offset - chunk_start
end = min(start + KO_SAMPLE_SIZE, chunk_len)
hasher.update(chunk[start:end])
async def calculate_koreader_hash(file_path: Path) -> str:
    """
    Calculate KOReader-compatible partial MD5 hash for a file.

    KOReader samples KO_SAMPLE_SIZE bytes at offsets 1024 << (2*i) for i
    from -1 to 10 (32-bit shift semantics, so i=-1 wraps to offset 0):
    0, 1024, 4096, 16384, 65536, 262144, 1048576, 4194304, ...

    Rather than streaming the whole file, seek directly to each sampling
    offset and read only the sampled bytes — at most ~12 KiB of I/O even for
    very large files.  Offsets ascend, so the first empty read means every
    remaining offset is past EOF and sampling can stop.  A short read at EOF
    hashes whatever bytes exist, matching the previous chunked behavior.

    Args:
        file_path: Path to the file to hash.

    Returns:
        The hexadecimal MD5 hash string.
    """
    hasher = hashlib.md5()
    async with aiofiles.open(file_path, "rb") as f:
        for offset in _get_koreader_offsets():
            await f.seek(offset)
            sample = await f.read(KO_SAMPLE_SIZE)
            if not sample:
                # Offset is at or past EOF; all later offsets are larger.
                break
            hasher.update(sample)
    return hasher.hexdigest()
class StreamingHasher:
    """
    Incrementally computes the KOReader partial-MD5 hash of a byte stream.

    Feed chunks in file order via update(); read the result with hexdigest().
    Lets the hash be computed while writing a file to disk, avoiding a second
    read pass afterwards.
    """

    def __init__(self) -> None:
        self.hasher = hashlib.md5()
        self.offsets = _get_koreader_offsets()
        self.position = 0

    def update(self, chunk: bytes) -> None:
        """Feed the next chunk of the stream into the hash."""
        _partial_md5_from_chunk(chunk, self.hasher, self.offsets, self.position)
        # Track the absolute file position so sampling offsets line up.
        self.position += len(chunk)

    def hexdigest(self) -> str:
        """Return the hex digest of everything fed so far."""
        return self.hasher.hexdigest()
################################## ##################################
# Filesystem related utilities # # Filesystem related utilities #
################################## ##################################

View File

@@ -202,6 +202,26 @@ async def bookshelf_service(
yield service yield service
@pytest.fixture
async def kosync_progress_service(
    sessionmaker: async_sessionmaker[AsyncSession],
) -> AsyncGenerator[services.KosyncProgressService, None]:
    """Yield a KosyncProgressService bound to a fresh database session."""
    # Both session and service are context managers, so teardown happens
    # automatically once the test using this fixture finishes.
    async with sessionmaker() as session:
        async with services.KosyncProgressService.new(session) as service:
            yield service
@pytest.fixture
async def kosync_device_service(
    sessionmaker: async_sessionmaker[AsyncSession],
) -> AsyncGenerator[services.KosyncDeviceService, None]:
    """Yield a KosyncDeviceService bound to a fresh database session."""
    # Both session and service are context managers, so teardown happens
    # automatically once the test using this fixture finishes.
    async with sessionmaker() as session:
        async with services.KosyncDeviceService.new(session) as service:
            yield service
# Data fixtures # Data fixtures

View File

@@ -28,7 +28,7 @@ from pathlib import Path
), ),
( (
Path( Path(
"/home/patrick/projects/chitai-api/tests/data_files/On The Origin of Species By Means of Natural Selection - Charles Darwin.epub" "tests/data_files/On The Origin of Species By Means of Natural Selection - Charles Darwin.epub"
), ),
2, 2,
"On the Origin of Species By Means of Natural Selection / Or, the Preservation of Favoured Races in the Struggle for Life", "On the Origin of Species By Means of Natural Selection / Or, the Preservation of Favoured Races in the Struggle for Life",
@@ -107,7 +107,7 @@ async def test_upload_book_without_data(
), ),
( (
Path( Path(
"/home/patrick/projects/chitai-api/tests/data_files/On The Origin of Species By Means of Natural Selection - Charles Darwin.epub" "tests/data_files/On The Origin of Species By Means of Natural Selection - Charles Darwin.epub"
), ),
2, 2,
"On the Origin of Species By Means of Natural Selection", "On the Origin of Species By Means of Natural Selection",
@@ -327,7 +327,7 @@ async def test_update_reading_progress(
# Update progress # Update progress
progress_data = { progress_data = {
"progress": 0.5, "percentage": 0.5,
} }
response = await populated_authenticated_client.post( response = await populated_authenticated_client.post(
@@ -343,7 +343,7 @@ async def test_update_reading_progress(
assert response.status_code == 200 assert response.status_code == 200
book = response.json() book = response.json()
assert book["progress"]["progress"] == 0.5 assert book["progress"]["percentage"] == 0.5
async def test_create_multiple_books_from_directory( async def test_create_multiple_books_from_directory(

View File

@@ -0,0 +1,98 @@
"""Tests for KOReader-compatible file hash generation."""
import pytest
from httpx import AsyncClient
from pathlib import Path
# Known KOReader hashes for test files.
# Maps a display filename to the on-disk sample file, its expected KOReader
# partial-MD5 hash, and the MIME type used when uploading it.
TEST_FILES = {
    "Moby Dick; Or, The Whale - Herman Melville.epub": {
        "path": Path("tests/data_files/Moby Dick; Or, The Whale - Herman Melville.epub"),
        "hash": "ceeef909ec65653ba77e1380dff998fb",
        "content_type": "application/epub+zip",
    },
    "Calculus Made Easy - Silvanus Thompson.pdf": {
        "path": Path("tests/data_files/Calculus Made Easy - Silvanus Thompson.pdf"),
        "hash": "ace67d512efd1efdea20f3c2436b6075",
        "content_type": "application/pdf",
    },
}
@pytest.mark.parametrize(
    ("book_name",),
    [(name,) for name in TEST_FILES.keys()],
)
async def test_upload_book_generates_correct_hash(
    authenticated_client: AsyncClient,
    book_name: str,
) -> None:
    """Test that uploading a book generates the correct KOReader-compatible hash."""
    expected = TEST_FILES[book_name]
    payload = expected["path"].read_bytes()

    response = await authenticated_client.post(
        "/books?library_id=1",
        files=[("files", (book_name, payload, expected["content_type"]))],
        data={"library_id": "1"},
    )

    assert response.status_code == 201
    created = response.json()
    # Exactly one file should be attached, carrying the known partial-MD5 hash.
    assert len(created["files"]) == 1
    uploaded = created["files"][0]
    assert "hash" in uploaded
    assert uploaded["hash"] == expected["hash"]
async def test_add_file_to_book_generates_correct_hash(
    authenticated_client: AsyncClient,
) -> None:
    """Test that adding a file to an existing book generates the correct hash."""
    epub_info = TEST_FILES["Moby Dick; Or, The Whale - Herman Melville.epub"]
    pdf_info = TEST_FILES["Calculus Made Easy - Silvanus Thompson.pdf"]

    # Create a book containing only the epub.
    create_response = await authenticated_client.post(
        "/books?library_id=1",
        files=[
            (
                "files",
                (epub_info["path"].name, epub_info["path"].read_bytes(), epub_info["content_type"]),
            )
        ],
        data={"library_id": "1"},
    )
    assert create_response.status_code == 201
    book_id = create_response.json()["id"]

    # Attach the pdf to the existing book.
    add_response = await authenticated_client.post(
        f"/books/{book_id}/files",
        files=[
            (
                "data",
                (pdf_info["path"].name, pdf_info["path"].read_bytes(), pdf_info["content_type"]),
            )
        ],
    )
    assert add_response.status_code == 201
    updated_book = add_response.json()

    # Both files should be present, each with its expected KOReader hash.
    assert len(updated_book["files"]) == 2
    for file_metadata in updated_book["files"]:
        assert "hash" in file_metadata
    epub_file = next(f for f in updated_book["files"] if f["path"].endswith(".epub"))
    pdf_file = next(f for f in updated_book["files"] if f["path"].endswith(".pdf"))
    assert epub_file["hash"] == epub_info["hash"]
    assert pdf_file["hash"] == pdf_info["hash"]

View File

@@ -1,10 +1,6 @@
"""Tests for BookService""" """Tests for BookService"""
import pytest import pytest
from pathlib import Path
import aiofiles.os as aios import aiofiles.os as aios
from chitai.schemas import BookCreate from chitai.schemas import BookCreate
@@ -29,16 +25,17 @@ class TestBookServiceCRUD:
) )
book = await books_service.to_model_on_create(book_data.model_dump()) book = await books_service.to_model_on_create(book_data.model_dump())
assert isinstance(book, m.Book)
# Add path manually as it won't be generated (not using the create function, but manually inserting into db) # Add path manually as it won't be generated (not using the create function, but manually inserting into db)
book.path = f"{test_library.root_path}/J.R.R Tolkien/The Fellowship of the Ring" book.path = f"{test_library.root_path}/J.R.R Tolkien/The Fellowship of the Ring"
await aios.makedirs(book.path) await aios.makedirs(book.path) # type: ignore[arg-type]
books_service.repository.session.add(book) books_service.repository.session.add(book)
await books_service.repository.session.commit() await books_service.repository.session.commit()
await books_service.repository.session.refresh(book) await books_service.repository.session.refresh(book)
await books_service.update( await books_service.update_book(
book.id, book.id,
{ {
"title": "The Fellowship of the Ring", "title": "The Fellowship of the Ring",
@@ -66,9 +63,7 @@ class TestBookServiceCRUD:
assert updated_book.identifiers[0].value == "9780261102354" assert updated_book.identifiers[0].value == "9780261102354"
assert updated_book.edition == 3 assert updated_book.edition == 3
assert updated_book.publisher is not None
assert updated_book.publisher.name == "Tolkien Estate" assert updated_book.publisher.name == "Tolkien Estate"
assert len(updated_book.tags) == 2 assert len(updated_book.tags) == 2
# book = await books_service.create(book_data.model_dump())

954
backend/uv.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -14,42 +14,42 @@
"lint": "prettier --check . && eslint ." "lint": "prettier --check . && eslint ."
}, },
"devDependencies": { "devDependencies": {
"@eslint/compat": "^1.4.0", "@eslint/compat": "^1.4.1",
"@eslint/js": "^9.38.0", "@eslint/js": "^9.39.4",
"@iconify/svelte": "^5.0.2", "@iconify/svelte": "^5.2.1",
"@internationalized/date": "^3.10.0", "@internationalized/date": "^3.12.0",
"@lucide/svelte": "^0.544.0", "@lucide/svelte": "^0.544.0",
"@sveltejs/adapter-node": "^5.4.0", "@sveltejs/adapter-node": "^5.5.4",
"@sveltejs/kit": "^2.48.0", "@sveltejs/kit": "^2.53.4",
"@sveltejs/vite-plugin-svelte": "^6.2.1", "@sveltejs/vite-plugin-svelte": "^6.2.4",
"@tailwindcss/vite": "^4.1.16", "@tailwindcss/vite": "^4.2.1",
"@types/node": "^22.18.12", "@types/node": "^22.19.15",
"bits-ui": "^2.14.0", "bits-ui": "^2.16.3",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"eslint": "^9.38.0", "eslint": "^9.39.4",
"eslint-config-prettier": "^10.1.8", "eslint-config-prettier": "^10.1.8",
"eslint-plugin-svelte": "^3.12.5", "eslint-plugin-svelte": "^3.15.0",
"globals": "^16.4.0", "globals": "^16.5.0",
"jsrepo": "^2.5.0", "jsrepo": "^2.5.2",
"openapi-typescript": "^7.10.1", "openapi-typescript": "^7.13.0",
"prettier": "^3.6.2", "prettier": "^3.8.1",
"prettier-plugin-svelte": "^3.4.0", "prettier-plugin-svelte": "^3.5.1",
"prettier-plugin-tailwindcss": "^0.6.14", "prettier-plugin-tailwindcss": "^0.6.14",
"svelte": "^5.41.0", "svelte": "^5.53.7",
"svelte-check": "^4.3.3", "svelte-check": "^4.4.5",
"tailwind-merge": "^3.3.1", "tailwind-merge": "^3.5.0",
"tailwind-scrollbar": "^4.0.2", "tailwind-scrollbar": "^4.0.2",
"tailwind-variants": "^3.1.1", "tailwind-variants": "^3.2.2",
"tailwindcss": "^4.1.16", "tailwindcss": "^4.2.1",
"tw-animate-css": "^1.4.0", "tw-animate-css": "^1.4.0",
"typescript": "^5.9.3", "typescript": "^5.9.3",
"typescript-eslint": "^8.46.2", "typescript-eslint": "^8.56.1",
"vite": "^7.1.12" "vite": "^7.3.1"
}, },
"dependencies": { "dependencies": {
"epubjs": "^0.3.93", "epubjs": "^0.3.93",
"mode-watcher": "^1.1.0", "mode-watcher": "^1.1.0",
"svelte-sonner": "^1.0.5", "svelte-sonner": "^1.0.8",
"zod": "^4.1.12" "zod": "^4.3.6"
} }
} }

2499
frontend/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
import { command, form, getRequestEvent, query } from '$app/server'; import { command, form, getRequestEvent, query } from '$app/server';
import { loginSchema, signupSchema } from '$lib/schema/auth'; import { loginSchema, signupSchema } from '$lib/schema/auth';
import { BACKEND_API_URL } from '$lib/server/config'; import { BACKEND_API_URL } from '$lib/server/config';
import { redirect } from '@sveltejs/kit'; import { invalid, redirect } from '@sveltejs/kit';
export const login = form(loginSchema, async (data, invalid) => { export const login = form(loginSchema, async (data, issue) => {
const { cookies, locals } = getRequestEvent(); const { cookies, locals } = getRequestEvent();
// Create URL-encoded form data // Create URL-encoded form data
@@ -19,11 +19,11 @@ export const login = form(loginSchema, async (data, invalid) => {
if (!response.ok) { if (!response.ok) {
if (response.status === 401) { if (response.status === 401) {
invalid(invalid.email('Invalid login credentials')); invalid(issue.email('Invalid login credentials'));
} else { } else {
const message = await response.text(); const message = await response.text();
console.error('Unknown error: ', message); console.error('Unknown error: ', message);
invalid(invalid.email('An unknown error occurred')); invalid(issue.email('An unknown error occurred'));
} }
} }
@@ -40,7 +40,7 @@ export const login = form(loginSchema, async (data, invalid) => {
redirect(303, '/'); redirect(303, '/');
}); });
export const signup = form(signupSchema, async (data, invalid) => { export const signup = form(signupSchema, async (data, issue) => {
const response = await fetch(`${BACKEND_API_URL}/access/signup`, { const response = await fetch(`${BACKEND_API_URL}/access/signup`, {
method: 'POST', method: 'POST',
body: JSON.stringify(data), body: JSON.stringify(data),
@@ -49,11 +49,11 @@ export const signup = form(signupSchema, async (data, invalid) => {
if (!response.ok) { if (!response.ok) {
if (response.status == 409) { if (response.status == 409) {
invalid(invalid.email('Email is already in use by another account')); invalid(issue.email('Email is already in use by another account'));
} else { } else {
const message = await response.text(); const message = await response.text();
console.error('Unknown error: ', message); console.error('Unknown error: ', message);
invalid(invalid.email('An unknown error occurred')); invalid(issue.email('An unknown error occurred'));
} }
} }
}); });

View File

@@ -0,0 +1,43 @@
import { command, form, getRequestEvent, query } from '$app/server';
import { createDeviceSchema, type Device } from '$lib/schema/device';
import { error } from '@sveltejs/kit';
import z from 'zod';
// Fetch all KOSync devices for the current user.
export const listDevices = query(async (): Promise<Device[]> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.get(`/devices`);
	// Fixed typo in the user-facing message ("unkown"), matching the
	// wording used by the other device remote functions.
	if (!response.ok) error(500, 'An unknown error occurred');
	const deviceResult = await response.json();
	// The backend returns a paginated envelope; only the items are needed here.
	return deviceResult.items;
});
// Create a new KOSync device from the validated form payload.
export const createDevice = form(createDeviceSchema, async (data): Promise<Device> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.post(`/devices`, data);
	if (!response.ok) {
		error(500, 'An unknown error occurred');
	}
	return await response.json();
});
// Ask the backend to mint a new API key for the device and return the
// updated device record.
// NOTE(review): this mutating action is issued as a GET — presumably the
// backend exposes it as GET /devices/{id}/regenerate; confirm, since a
// POST would be more conventional for a state-changing endpoint.
export const regenerateDeviceApiKey = command(z.string(), async (deviceId): Promise<Device> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.get(`/devices/${deviceId}/regenerate`);
	if (!response.ok) error(500, 'An unknown error occurred');
	return await response.json();
})
// Delete the device with the given id.
export const deleteDevice = command(z.string(), async (deviceId): Promise<void> => {
	const { locals } = getRequestEvent();
	const result = await locals.api.delete(`/devices/${deviceId}`);
	if (!result.ok) {
		error(500, 'An unknown error occurred');
	}
});

View File

@@ -0,0 +1,50 @@
<script lang="ts">
	import { createDevice } from '$lib/api/device.remote';
	import * as Field from '$lib/components/ui/field/index.js';
	import { Input } from '$lib/components/ui/input/index.js';
	import { Button } from '$lib/components/ui/button/index.js';
	import { toast } from 'svelte-sonner';

	interface Props {
		// Called after a device is successfully created (e.g. to close a dialog).
		onSuccess?: () => void;
	}

	let { onSuccess }: Props = $props();
</script>

<!-- Device creation form driven by the createDevice remote function. -->
<form
	{...createDevice.enhance(async ({ form, submit }) => {
		try {
			await submit();
			// Validation issues are rendered inline under the field; stop here
			// so the form keeps its values for correction.
			const issues = createDevice.fields.allIssues();
			if (issues && issues.length > 0) {
				return;
			}
			// Read the name before reset() clears the field state.
			const deviceName = createDevice.fields.name.value();
			form.reset();
			toast.success(`Device '${deviceName}' created`);
			onSuccess?.();
		} catch (error) {
			console.error('Failed to create device: ', error);
			toast.error('Failed to create device');
		}
	})}
	class="flex flex-col gap-3"
>
	<Field.Set>
		<Field.Group>
			<Field.Field>
				<Field.Label for="name">Device name</Field.Label>
				<Input {...createDevice.fields.name.as('text')} placeholder="e.g. Kindle Paperwhite" />
				{#each createDevice.fields.name.issues() ?? [] as issue}
					<Field.Error>{issue.message}</Field.Error>
				{/each}
			</Field.Field>
		</Field.Group>
	</Field.Set>
	<Button type="submit" class="ml-auto w-24">Create</Button>
</form>

View File

@@ -0,0 +1,8 @@
import { z } from 'zod';
import type { components } from './openapi/schema';
// Device row shape as returned by the backend (generated OpenAPI types).
export type Device = components['schemas']['KosyncDeviceRead']

// Client-side validation for the "create device" form.
export const createDeviceSchema = z.object({
	name: z.string().min(1, 'Name cannot be empty')
})

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,5 @@
<script lang="ts"> <script lang="ts">
import * as Sidebar from '$lib/components/ui/sidebar/index.js';
import { page } from '$app/state'; import { page } from '$app/state';
import Separator from '$lib/components/ui/separator/separator.svelte';
let { children } = $props(); let { children } = $props();
const items = [ const items = [
@@ -12,40 +10,29 @@
{ {
title: 'Libraries', title: 'Libraries',
url: '/settings/libraries' url: '/settings/libraries'
},
{
title: 'Devices',
url: '/settings/devices'
} }
]; ];
</script> </script>
<div class="flex h-[calc(100vh-var(--header-height)-2rem)] flex-col"> <div class="flex h-[calc(100vh-var(--header-height)-2rem)] flex-col">
<h1 class="text-xl font-bold">Settings</h1> <h1 class="text-xl font-bold">Settings</h1>
<Separator class="my-4" /> <div class="mt-6 flex flex-1 gap-8 overflow-hidden">
<div class="flex flex-1 gap-6 overflow-hidden"> <nav class="flex w-48 shrink-0 flex-col gap-1">
<div class="flex gap-8">
<Sidebar.Provider class="shrink-0">
<Sidebar.Inset>
<Sidebar.Content>
<Sidebar.Group class="flex flex-col gap-1">
{#each items as item} {#each items as item}
<Sidebar.MenuItem class="w-48"> <a
<Sidebar.MenuButton href={item.url}
class="rounded hover:bg-muted {page.url.pathname.endsWith(item.url) class="rounded-md px-3 py-2 text-sm font-medium transition-colors hover:bg-muted {page.url.pathname.endsWith(item.url)
? 'bg-muted' ? 'bg-muted'
: ''}" : 'text-muted-foreground'}"
> >
{#snippet child({ props })} {item.title}
<a href={item.url} {...props}>
<span class="text-base">{item.title}</span>
</a> </a>
{/snippet}
</Sidebar.MenuButton>
</Sidebar.MenuItem>
{/each} {/each}
</Sidebar.Group> </nav>
</Sidebar.Content> <div class="flex-1 overflow-auto">{@render children()}</div>
</Sidebar.Inset>
</Sidebar.Provider>
</div>
<Separator orientation="vertical" class="self-stretch" />
<div class="mx-2 mt-2 flex-1 overflow-auto">{@render children()}</div>
</div> </div>
</div> </div>

View File

@@ -0,0 +1,9 @@
import { listDevices } from "$lib/api/device.remote";

/**
 * Server load function for the device settings page.
 * Fetches the user's registered KOReader sync devices and exposes them
 * to the page as `data.devices`.
 */
export async function load() {
	return { devices: await listDevices() };
}

View File

@@ -0,0 +1,240 @@
<!--
	Device settings page: lists the user's KOReader sync devices and lets
	the user create a device, reveal/copy its API key, regenerate the key,
	and delete the device. Destructive actions go through confirmation
	dialogs.
-->
<script lang="ts">
	import * as Table from '$lib/components/ui/table/index.js';
	import * as Card from '$lib/components/ui/card/index.js';
	import * as Dialog from '$lib/components/ui/dialog/index.js';
	import * as DropdownMenu from '$lib/components/ui/dropdown-menu/index.js';
	import * as AlertDialog from '$lib/components/ui/alert-dialog/index.js';
	import * as Empty from '$lib/components/ui/empty/index.js';
	import { Button, buttonVariants } from '$lib/components/ui/button/index.js';
	import {
		EllipsisVertical,
		Plus,
		Eye,
		EyeOff,
		Copy,
		RefreshCw,
		Trash2,
		Smartphone
	} from '@lucide/svelte';
	import DeviceCreateForm from '$lib/components/forms/device-create-form.svelte';
	import { deleteDevice, regenerateDeviceApiKey } from '$lib/api/device.remote';
	import { toast } from 'svelte-sonner';
	import { invalidateAll } from '$app/navigation';
	import type { Device } from '$lib/schema/device';

	// `data.devices` is supplied by this route's server load function.
	let { data } = $props();

	// Whether the "Add Device" dialog is open.
	let createDialogOpen = $state(false);
	// Ids of devices whose API key is currently shown unmasked.
	let visibleApiKeys = $state<Set<string>>(new Set());
	// Device awaiting delete confirmation (null = dialog closed).
	let deleteConfirmDevice = $state<Device | null>(null);
	// Device awaiting API-key-regeneration confirmation (null = dialog closed).
	let regenerateConfirmDevice = $state<Device | null>(null);

	// Toggle plain-text visibility of one device's API key. A fresh Set is
	// assigned afterwards so Svelte's reactivity notices the in-place
	// mutation.
	// NOTE(review): ids are never pruned when a device is deleted; harmless,
	// but the set can accumulate stale entries.
	function toggleApiKeyVisibility(deviceId: string) {
		if (visibleApiKeys.has(deviceId)) {
			visibleApiKeys.delete(deviceId);
		} else {
			visibleApiKeys.add(deviceId);
		}
		visibleApiKeys = new Set(visibleApiKeys);
	}

	// Bullet placeholder shown for a hidden key, capped at 32 characters so
	// long keys don't stretch the table cell.
	function maskApiKey(apiKey: string): string {
		return '•'.repeat(Math.min(apiKey.length, 32));
	}

	// Copy an API key to the clipboard, reporting success/failure via toast.
	async function copyToClipboard(text: string) {
		try {
			await navigator.clipboard.writeText(text);
			toast.success('API key copied to clipboard');
		} catch {
			toast.error('Failed to copy to clipboard');
		}
	}

	// Delete a device, close the confirmation dialog, and re-run load
	// functions so the table reflects the removal.
	async function handleDelete(device: Device) {
		try {
			await deleteDevice(String(device.id));
			toast.success(`Device '${device.name}' deleted`);
			deleteConfirmDevice = null;
			await invalidateAll();
		} catch {
			toast.error('Failed to delete device');
		}
	}

	// Regenerate a device's API key (invalidating the old one), then refresh
	// page data so the new key is displayed.
	async function handleRegenerate(device: Device) {
		try {
			await regenerateDeviceApiKey(String(device.id));
			toast.success(`API key regenerated for '${device.name}'`);
			regenerateConfirmDevice = null;
			await invalidateAll();
		} catch {
			toast.error('Failed to regenerate API key');
		}
	}

	// Called by DeviceCreateForm on success: close the dialog and refresh
	// the device list.
	function handleDeviceCreated() {
		createDialogOpen = false;
		invalidateAll();
	}
</script>

<!-- Page header with the "Add Device" dialog trigger. -->
<div class="mb-4 flex items-center justify-between">
	<div>
		<h2 class="text-lg font-semibold">Devices</h2>
		<p class="text-sm text-muted-foreground">Manage your KOReader devices</p>
	</div>
	<Dialog.Root bind:open={createDialogOpen}>
		<Dialog.Trigger class={buttonVariants({ variant: 'outline' })}>
			<Plus />
			Add Device
		</Dialog.Trigger>
		<Dialog.Content>
			<Dialog.Header>
				<Dialog.Title>Add a new device</Dialog.Title>
				<Dialog.Description>
					Create a device to sync your KOReader reading progress.
				</Dialog.Description>
			</Dialog.Header>
			<DeviceCreateForm onSuccess={handleDeviceCreated} />
		</Dialog.Content>
	</Dialog.Root>
</div>

{#if data.devices.length === 0}
	<!-- Empty state: prompt the user to add their first device. -->
	<Empty.Root class="py-12">
		<Empty.Header>
			<Empty.Media variant="icon">
				<Smartphone />
			</Empty.Media>
			<Empty.Title>No devices</Empty.Title>
			<Empty.Description>
				Add a device to sync your KOReader reading progress.
			</Empty.Description>
		</Empty.Header>
		<Empty.Content>
			<Button onclick={() => (createDialogOpen = true)}>
				<Plus />
				Add Device
			</Button>
		</Empty.Content>
	</Empty.Root>
{:else}
	<!-- Device table: name, maskable API key, and per-row actions menu. -->
	<Card.Root>
		<Card.Content class="p-0">
			<Table.Root>
				<Table.Header>
					<Table.Row>
						<Table.Head class="pl-4">Name</Table.Head>
						<Table.Head>API Key</Table.Head>
						<Table.Head class="w-16"></Table.Head>
					</Table.Row>
				</Table.Header>
				<Table.Body>
					{#each data.devices as device}
						{@const isVisible = visibleApiKeys.has(String(device.id))}
						<Table.Row class="h-14">
							<Table.Cell class="pl-4 font-medium">
								{device.name}
							</Table.Cell>
							<Table.Cell>
								<div class="flex items-center gap-2">
									<code class="rounded bg-muted px-2 py-1 font-mono text-sm">
										{isVisible ? device.api_key : maskApiKey(device.api_key)}
									</code>
									<Button
										variant="ghost"
										size="icon"
										class="h-8 w-8"
										onclick={() => toggleApiKeyVisibility(String(device.id))}
									>
										{#if isVisible}
											<EyeOff class="h-4 w-4" />
										{:else}
											<Eye class="h-4 w-4" />
										{/if}
									</Button>
									<Button
										variant="ghost"
										size="icon"
										class="h-8 w-8"
										onclick={() => copyToClipboard(device.api_key)}
									>
										<Copy class="h-4 w-4" />
									</Button>
								</div>
							</Table.Cell>
							<Table.Cell class="text-center">
								<DropdownMenu.Root>
									<DropdownMenu.Trigger>
										<Button variant="ghost" size="icon" class="h-8 w-8">
											<EllipsisVertical class="h-4 w-4" />
										</Button>
									</DropdownMenu.Trigger>
									<DropdownMenu.Content align="end">
										<DropdownMenu.Item onclick={() => (regenerateConfirmDevice = device)}>
											<RefreshCw class="mr-2 h-4 w-4" />
											Regenerate API Key
										</DropdownMenu.Item>
										<DropdownMenu.Separator />
										<DropdownMenu.Item
											class="text-destructive focus:text-destructive"
											onclick={() => (deleteConfirmDevice = device)}
										>
											<Trash2 class="mr-2 h-4 w-4" />
											Delete
										</DropdownMenu.Item>
									</DropdownMenu.Content>
								</DropdownMenu.Root>
							</Table.Cell>
						</Table.Row>
					{/each}
				</Table.Body>
			</Table.Root>
		</Card.Content>
	</Card.Root>
{/if}

<!-- Delete Confirmation Dialog -->
<!-- NOTE(review): `open` is a one-way prop here (no bind:), so state is only
     cleared through the Cancel/Action handlers — confirm that dismissing the
     dialog via Esc/overlay also resets deleteConfirmDevice. -->
<AlertDialog.Root open={deleteConfirmDevice !== null}>
	<AlertDialog.Content>
		<AlertDialog.Header>
			<AlertDialog.Title>Delete device?</AlertDialog.Title>
			<AlertDialog.Description>
				Are you sure you want to delete "{deleteConfirmDevice?.name}"? This action cannot be undone.
				The device will no longer be able to sync reading progress.
			</AlertDialog.Description>
		</AlertDialog.Header>
		<AlertDialog.Footer>
			<AlertDialog.Cancel onclick={() => (deleteConfirmDevice = null)}>Cancel</AlertDialog.Cancel>
			<AlertDialog.Action
				class={buttonVariants({ variant: 'destructive' })}
				onclick={() => deleteConfirmDevice && handleDelete(deleteConfirmDevice)}
			>
				Delete
			</AlertDialog.Action>
		</AlertDialog.Footer>
	</AlertDialog.Content>
</AlertDialog.Root>

<!-- Regenerate API Key Confirmation Dialog -->
<!-- NOTE(review): same one-way `open` pattern as the delete dialog above. -->
<AlertDialog.Root open={regenerateConfirmDevice !== null}>
	<AlertDialog.Content>
		<AlertDialog.Header>
			<AlertDialog.Title>Regenerate API key?</AlertDialog.Title>
			<AlertDialog.Description>
				This will invalidate the current API key for "{regenerateConfirmDevice?.name}".
				You will need to update the key in your KOReader device settings.
			</AlertDialog.Description>
		</AlertDialog.Header>
		<AlertDialog.Footer>
			<AlertDialog.Cancel onclick={() => (regenerateConfirmDevice = null)}>Cancel</AlertDialog.Cancel>
			<AlertDialog.Action
				onclick={() => regenerateConfirmDevice && handleRegenerate(regenerateConfirmDevice)}
			>
				Regenerate
			</AlertDialog.Action>
		</AlertDialog.Footer>
	</AlertDialog.Content>
</AlertDialog.Root>

View File

@@ -8,33 +8,39 @@
const libraryState = getLibraryState(); const libraryState = getLibraryState();
</script> </script>
<h1 class="text-lg font-semibold">Library Settings</h1> <div class="mb-4 flex items-center justify-between">
<p class="text-sm text-muted-foreground">Manage your libraries</p> <div>
<h2 class="text-lg font-semibold">Libraries</h2>
<Card.Root class="mt-6"> <p class="text-sm text-muted-foreground">Manage your libraries</p>
<Card.Content> </div>
<Card.Header class="mb-2 flex items-center"> <Button variant="outline" onclick={() => libraryState.openLibraryCreateDialog()}>
<Card.Title>Libraries</Card.Title>
<Button
variant="outline"
class="ml-auto"
onclick={() => libraryState.openLibraryCreateDialog()}
>
<Plus /> <Plus />
Add Library Add Library
</Button> </Button>
</Card.Header> </div>
<Card.Root>
<Card.Content class="p-0">
<Table.Root> <Table.Root>
<Table.Header>
<Table.Row>
<Table.Head class="w-16 pl-4"></Table.Head>
<Table.Head>Name</Table.Head>
<Table.Head class="w-16"></Table.Head>
</Table.Row>
</Table.Header>
<Table.Body> <Table.Body>
{#each libraryState.libraries as library} {#each libraryState.libraries as library}
<Table.Row class="h-16"> <Table.Row class="h-14">
<Table.Cell class="w-16 text-center text-lg font-semibold">{library.name[0]}</Table.Cell <Table.Cell class="w-16 pl-4 text-center text-lg font-semibold"
>{library.name[0]}</Table.Cell
> >
<Table.Cell class="font-medium" <Table.Cell class="font-medium">
><a href={`/library/${library.id}`} class="hover:underline">{library.name}</a <a href={`/library/${library.id}`} class="hover:underline">{library.name}</a>
></Table.Cell </Table.Cell>
> <Table.Cell class="w-16 text-center">
<Table.Cell class="w-16 text-center"><EllipsisVertical class="scale-75" /></Table.Cell> <EllipsisVertical class="scale-75" />
</Table.Cell>
</Table.Row> </Table.Row>
{/each} {/each}
</Table.Body> </Table.Body>