Compare commits

...

9 Commits

Author SHA1 Message Date
9711c68fbb chore: add migration for initial db tables 2026-03-09 14:28:35 -04:00
20df5ea140 remove python313Full package as it is no longer available in nixos 25.11 2026-03-09 14:26:33 -04:00
2e1d16ef9e refactor: move db and file watcher setup to use async context manager 2026-03-09 14:25:24 -04:00
2de0aac23a chore: update dependencies and ignore new warning in tests
ignore JWT related warning that was introduced when updating
dependencies related to JWT secret size (test value of "secret" is too
short).
2026-03-09 14:21:37 -04:00
10eaf84329 refactor: simplify settings page layout 2026-03-09 14:19:59 -04:00
a3e49dc918 regenerate OpenAPI schema with new KOSync models 2026-03-09 14:19:50 -04:00
3072f72958 feat: add device settings page
- Add device CRUD operations (create, delete, regenerate api keys)
- Add API key visibility toggle and copy button
2026-03-09 14:16:53 -04:00
51c1900d8c feat: add KOSync server
- Add KOSync device management
- Add API key auth middleware for devices to authenticate
- Add KOSync-compatible progress sync endpoints
- Add basic tests for KOSync compatible hashes
2026-03-09 14:11:21 -04:00
20a69de968 feat: add KOReader compatible hash to file metadata
Implement KOReader's partial MD5 algorithm for document identification. This hash allows KOReader devices to match local files with server records for reading progress synchronization (KOSync).
2026-03-09 13:38:07 -04:00
33 changed files with 2635 additions and 599 deletions

View File

@@ -0,0 +1 @@
Asynchronous SQLAlchemy configuration with Advanced Alchemy.

View File

@@ -1,12 +1,12 @@
import asyncio
from typing import TYPE_CHECKING, cast
from alembic.autogenerate import rewriter
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
from advanced_alchemy.base import metadata_registry
from alembic import context
from alembic.autogenerate import rewriter
if TYPE_CHECKING:
from sqlalchemy.engine import Connection

View File

@@ -0,0 +1,222 @@
"""create initial tables
Revision ID: 43bdf42a4f6c
Revises:
Create Date: 2026-03-08 16:00:54.727868
"""
import warnings
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC, StoredObject, PasswordHash, FernetBackend
from advanced_alchemy.types.encrypted_string import PGCryptoBackend
from advanced_alchemy.types.password_hash.argon2 import Argon2Hasher
from advanced_alchemy.types.password_hash.passlib import PasslibHasher
from advanced_alchemy.types.password_hash.pwdlib import PwdlibHasher
from pwdlib.hashers.argon2 import Argon2Hasher as PwdlibArgon2Hasher
from sqlalchemy import Text # noqa: F401
if TYPE_CHECKING:
from collections.abc import Sequence
__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
sa.GUID = GUID
sa.DateTimeUTC = DateTimeUTC
sa.ORA_JSONB = ORA_JSONB
sa.EncryptedString = EncryptedString
sa.EncryptedText = EncryptedText
sa.StoredObject = StoredObject
sa.PasswordHash = PasswordHash
sa.Argon2Hasher = Argon2Hasher
sa.PasslibHasher = PasslibHasher
sa.PwdlibHasher = PwdlibHasher
sa.FernetBackend = FernetBackend
sa.PGCryptoBackend = PGCryptoBackend
# revision identifiers, used by Alembic.
revision = '43bdf42a4f6c'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply this revision: run schema changes, then data changes."""
    with warnings.catch_warnings():
        # Advanced Alchemy's custom column types can emit UserWarnings during
        # DDL generation; suppress them so migration output stays clean.
        warnings.filterwarnings("ignore", category=UserWarning)
        with op.get_context().autocommit_block():
            schema_upgrades()
            data_upgrades()
def downgrade() -> None:
    """Revert this revision: undo data changes first, then schema changes."""
    with warnings.catch_warnings():
        # Mirror of upgrade(): silence UserWarnings from custom column types.
        warnings.filterwarnings("ignore", category=UserWarning)
        with op.get_context().autocommit_block():
            data_downgrades()
            schema_downgrades()
def schema_upgrades() -> None:
"""schema upgrade migrations go here."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('email', sa.String(), nullable=False),
sa.Column('password', sa.PasswordHash(backend=sa.PwdlibHasher(PwdlibArgon2Hasher()), length=128), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_users')),
sa.UniqueConstraint('email', name=op.f('uq_users_email'))
)
op.create_table('book_lists',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_book_lists_library_id_libraries')),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_lists_user_id_users')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_book_lists'))
)
op.create_table('books',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('library_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('title', sa.String(), nullable=False),
sa.Column('subtitle', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('published_date', sa.Date(), nullable=True),
sa.Column('language', sa.String(), nullable=True),
sa.Column('pages', sa.Integer(), nullable=True),
sa.Column('cover_image', sa.String(), nullable=True),
sa.Column('edition', sa.Integer(), nullable=True),
sa.Column('path', sa.String(), nullable=True),
sa.Column('publisher_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('series_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=True),
sa.Column('series_position', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['library_id'], ['libraries.id'], name=op.f('fk_books_library_id_libraries')),
sa.ForeignKeyConstraint(['publisher_id'], ['publishers.id'], name=op.f('fk_books_publisher_id_publishers')),
sa.ForeignKeyConstraint(['series_id'], ['book_series.id'], name=op.f('fk_books_series_id_book_series')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_books'))
)
with op.batch_alter_table('books', schema=None) as batch_op:
batch_op.create_index('ix_books_title_trigram', ['title'], unique=False, postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})
op.create_table('devices',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('api_key', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_devices_user_id_users')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_devices')),
sa.UniqueConstraint('api_key', name=op.f('uq_devices_api_key'))
)
op.create_table('book_author_links',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('author_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['author_id'], ['authors.id'], name=op.f('fk_book_author_links_author_id_authors')),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_author_links_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', 'book_id', 'author_id', name=op.f('pk_book_author_links'))
)
op.create_table('book_list_links',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('list_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_list_links_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['list_id'], ['book_lists.id'], name=op.f('fk_book_list_links_list_id_book_lists')),
sa.PrimaryKeyConstraint('id', 'book_id', 'list_id', name=op.f('pk_book_list_links'))
)
op.create_table('book_progress',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('epub_cfi', sa.String(), nullable=True),
sa.Column('epub_xpointer', sa.String(), nullable=True),
sa.Column('pdf_page', sa.Integer(), nullable=True),
sa.Column('percentage', sa.Float(), nullable=False),
sa.Column('completed', sa.Boolean(), nullable=True),
sa.Column('device', sa.String(), nullable=True),
sa.Column('device_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_progress_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_book_progress_user_id_users'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_book_progress'))
)
op.create_table('book_tag_link',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('tag_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_book_tag_link_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], name=op.f('fk_book_tag_link_tag_id_tags')),
sa.PrimaryKeyConstraint('id', 'book_id', 'tag_id', name=op.f('pk_book_tag_link'))
)
op.create_table('file_metadata',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('hash', sa.String(), nullable=False),
sa.Column('path', sa.String(), nullable=False),
sa.Column('size', sa.Integer(), nullable=False),
sa.Column('content_type', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_file_metadata_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_file_metadata'))
)
op.create_table('identifiers',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('value', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_identifiers_book_id_books'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', 'name', 'book_id', name=op.f('pk_identifiers'))
)
op.create_table('kosync_progress',
sa.Column('id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('user_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('book_id', sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), nullable=False),
sa.Column('document', sa.String(), nullable=False),
sa.Column('progress', sa.String(), nullable=True),
sa.Column('percentage', sa.Float(), nullable=True),
sa.Column('device', sa.String(), nullable=True),
sa.Column('device_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['book_id'], ['books.id'], name=op.f('fk_kosync_progress_book_id_books'), ondelete='cascade'),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_kosync_progress_user_id_users'), ondelete='cascade'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_kosync_progress'))
)
# ### end Alembic commands ###
def schema_downgrades() -> None:
    """schema downgrade migrations go here."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Tables are dropped in reverse dependency order so no foreign key ever
    # references a missing parent table.
    op.drop_table('kosync_progress')
    op.drop_table('identifiers')
    op.drop_table('file_metadata')
    op.drop_table('book_tag_link')
    op.drop_table('book_progress')
    op.drop_table('book_list_links')
    op.drop_table('book_author_links')
    op.drop_table('devices')
    with op.batch_alter_table('books', schema=None) as batch_op:
        # The trigram index must be dropped before the table itself.
        batch_op.drop_index('ix_books_title_trigram', postgresql_using='gin', postgresql_ops={'title': 'gin_trgm_ops'})
    op.drop_table('books')
    op.drop_table('book_lists')
    op.drop_table('users')
    # ### end Alembic commands ###
def data_upgrades() -> None:
    """Add any optional data upgrade migrations here!"""
    # Intentionally empty: this revision only creates schema objects.
def data_downgrades() -> None:
    """Add any optional data downgrade migrations here!"""
    # Intentionally empty: there is no seed data to remove for this revision.

View File

@@ -8,7 +8,7 @@ authors = [
]
requires-python = ">=3.13"
dependencies = [
"advanced-alchemy==1.8.0",
"advanced-alchemy>=1.8.0",
"aiofiles>=24.1.0",
"asyncpg>=0.30.0",
"ebooklib>=0.19",
@@ -41,3 +41,6 @@ dev = [
[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
filterwarnings = [
"ignore::jwt.warnings.InsecureKeyLengthWarning",
]

View File

@@ -3,7 +3,6 @@
pkgs.mkShell {
buildInputs = with pkgs; [
# Python development environment for Chitai
python313Full
python313Packages.greenlet
python313Packages.ruff
uv

View File

@@ -1,5 +1,6 @@
import asyncio
from typing import Any
from contextlib import asynccontextmanager
from typing import Any, AsyncGenerator
from chitai.services.book import BookService
from chitai.services.consume import ConsumeDirectoryWatcher
@@ -63,14 +64,15 @@ oauth2_auth = OAuth2PasswordBearerAuth[User](
"/access/signup",
"/opds",
"/schema",
"/syncs",
"/users/auth",
],
)
watcher_task: asyncio.Task
async def startup():
"""Run setup."""
@asynccontextmanager
async def setup_db_connection(app: Litestar) -> AsyncGenerator[None, None]:
# Set up database
async with settings.alchemy_config.get_session() as db_session:
# Create default library if none exist
@@ -86,21 +88,30 @@ async def startup():
)
await db_session.commit()
# book_service = BookService(session=db_session)
try:
yield
finally:
await db_session.aclose()
@asynccontextmanager
async def setup_directory_watcher(app: Litestar) -> AsyncGenerator[None, None]:
# Create book covers directory if it does not exist
await create_directory(settings.book_cover_path)
# Create consume directory
await create_directory(settings.consume_path)
async with settings.alchemy_config.get_session() as db_session:
book_service = BookService(session=db_session)
library_service = LibraryService(session=db_session)
# file_watcher = ConsumeDirectoryWatcher(settings.consume_path, library_service, book_service)
# watcher_task = asyncio.create_task(file_watcher.init_watcher())
async def shutdown():
""" Run shutdown tasks. """
file_watcher = ConsumeDirectoryWatcher(settings.consume_path, library_service, book_service)
watcher_task = asyncio.create_task(file_watcher.init_watcher())
try:
yield
finally:
watcher_task.cancel()
def create_app() -> Litestar:
@@ -114,12 +125,14 @@ def create_app() -> Litestar:
c.PublisherController,
c.TagController,
c.OpdsController,
c.DeviceController,
c.KosyncController,
create_static_files_router(path="/covers", directories=["./covers"]),
index,
healthcheck,
],
exception_handlers=exception_handlers,
on_startup=[startup],
lifespan=[setup_db_connection, setup_directory_watcher],
plugins=[alchemy],
on_app_init=[oauth2_auth.on_app_init],
openapi_config=OpenAPIConfig(

View File

@@ -6,3 +6,5 @@ from .author import AuthorController
from .tag import TagController
from .publisher import PublisherController
from .opds import OpdsController
from .kosync_device import DeviceController
from .kosync_progress import KosyncController

View File

@@ -0,0 +1,54 @@
from advanced_alchemy.service.pagination import OffsetPagination
from chitai.database.models.kosync_device import KosyncDevice
from chitai.database.models.user import User
from chitai.schemas.kosync import KosyncDeviceCreate, KosyncDeviceRead
from chitai.services.kosync_device import KosyncDeviceService
from litestar import Controller, post, get, delete
from litestar.di import Provide
from chitai.services import dependencies as deps
class DeviceController(Controller):
    """ Controller for managing KOReader devices."""

    # Per-request device service bound to the request's DB session.
    dependencies = {
        "device_service": Provide(deps.provide_kosync_device_service)
    }
    path = "/devices"

    @get()
    async def get_devices(self, device_service: KosyncDeviceService, current_user: User) -> OffsetPagination[KosyncDeviceRead]:
        """ Return a list of all the user's devices."""
        devices = await device_service.list(
            KosyncDevice.user_id == current_user.id
        )
        return device_service.to_schema(devices, schema_type=KosyncDeviceRead)

    @post()
    async def create_device(self, data: KosyncDeviceCreate, device_service: KosyncDeviceService, current_user: User) -> KosyncDeviceRead:
        """Create a device for the current user; the service generates the API key."""
        device = await device_service.create({
            'name': data.name,
            'user_id': current_user.id
        })
        return device_service.to_schema(device, schema_type=KosyncDeviceRead)

    @delete("/{device_id:int}")
    async def delete_device(self, device_id: int, device_service: KosyncDeviceService, current_user: User) -> None:
        """Delete one of the current user's devices.

        Raises a not-found error (via get_one) if the device does not exist
        or belongs to another user.
        """
        # Ensure the device exists and is owned by the user
        device = await device_service.get_one(
            KosyncDevice.id == device_id,
            KosyncDevice.user_id == current_user.id
        )
        await device_service.delete(device.id)

    @get("/{device_id:int}/regenerate")
    async def regenerate_device_api_key(self, device_id: int, device_service: KosyncDeviceService, current_user: User) -> KosyncDeviceRead:
        """Replace the device's API key with a freshly generated one.

        NOTE(review): this mutates state but is exposed as GET; consider POST.
        """
        # Ensure the device exists and is owned by the user
        device = await device_service.get_one(
            KosyncDevice.id == device_id,
            KosyncDevice.user_id == current_user.id
        )
        updated_device = await device_service.regenerate_api_key(device.id)
        return device_service.to_schema(updated_device, schema_type=KosyncDeviceRead)

View File

@@ -0,0 +1,90 @@
from __future__ import annotations
from typing import Annotated
from litestar import Controller, get, put
from litestar.exceptions import HTTPException
from litestar.status_codes import HTTP_403_FORBIDDEN
from litestar.response import Response
from litestar.params import Parameter
from litestar.di import Provide
from chitai.database import models as m
from chitai.schemas.kosync import KosyncProgressUpdate, KosyncProgressRead
from chitai.services.book import BookService
from chitai.services.kosync_progress import KosyncProgressService
from chitai.services.filters.book import FileHashFilter
from chitai.services import dependencies as deps
from chitai.middleware.kosync_auth import kosync_api_key_auth
class KosyncController(Controller):
    """Controller for syncing progress with KOReader devices."""

    # Every route here authenticates via the device API key in the
    # X-AUTH-USER header (see kosync_api_key_auth), not the app's OAuth2 flow.
    middleware = [kosync_api_key_auth]
    dependencies = {
        "kosync_progress_service": Provide(deps.provide_kosync_progress_service),
        "book_service": Provide(deps.provide_book_service),
        "user": Provide(deps.provide_user_via_kosync_auth),
    }

    @put("/syncs/progress")
    async def upload_progress(
        self,
        data: KosyncProgressUpdate,
        book_service: BookService,
        kosync_progress_service: KosyncProgressService,
        user: m.User,
    ) -> None:
        """Upload book progress from a KOReader device."""
        # KOReader identifies documents by a file hash; resolve it to the
        # matching book via the stored FileMetadata hashes.
        book = await book_service.get_one(FileHashFilter([data.document]))
        await kosync_progress_service.upsert_progress(
            user_id=user.id,
            book_id=book.id,
            document=data.document,
            progress=data.progress,
            percentage=data.percentage,
            device=data.device,
            device_id=data.device_id,
        )

    @get("/syncs/progress/{document_id:str}")
    async def get_progress(
        self,
        document_id: str,
        kosync_progress_service: KosyncProgressService,
        user: m.User,
    ) -> KosyncProgressRead:
        """Return the Kosync progress record associated with the given document."""
        progress = await kosync_progress_service.get_by_document_hash(user.id, document_id)
        if not progress:
            raise HTTPException(status_code=404, detail="No progress found for document")
        return KosyncProgressRead(
            document=progress.document,
            progress=progress.progress,
            percentage=progress.percentage,
            device=progress.device,
            device_id=progress.device_id,
        )

    @get("/users/auth")
    async def authorize(
        self, _api_key: Annotated[str, Parameter(header="X-AUTH-USER")]
    ) -> Response[dict[str, str]]:
        """Verify authentication (handled by middleware)."""
        # Reaching this handler means the middleware already accepted the key.
        return Response(status_code=200, content={"authorized": "OK"})

    @get("/users/register")
    async def register(self) -> None:
        """User registration endpoint - disabled."""
        raise HTTPException(
            detail="User accounts must be created via the main application",
            status_code=HTTP_403_FORBIDDEN,
        )

View File

@@ -3,6 +3,8 @@ from .book import Book, Identifier, FileMetadata
from .book_list import BookList, BookListLink
from .book_progress import BookProgress
from .book_series import BookSeries
from .kosync_device import KosyncDevice
from .kosync_progress import KosyncProgress
from .library import Library
from .publisher import Publisher
from .tag import Tag, BookTagLink

View File

@@ -0,0 +1,15 @@
from sqlalchemy import ColumnElement, ForeignKey
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import mapped_column
from advanced_alchemy.base import BigIntAuditBase
class KosyncDevice(BigIntAuditBase):
    """A KOReader device registered by a user for KOSync API-key auth."""

    __tablename__ = "devices"

    # Owning user. NOTE(review): no ondelete cascade here, unlike
    # kosync_progress — confirm whether device rows should outlive the user.
    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
    # Secret token the device presents in the X-AUTH-USER header.
    api_key: Mapped[str] = mapped_column(unique=True)
    # Human-readable device name chosen by the user.
    name: Mapped[str]

    def __repr__(self) -> str:
        return f"KosyncDevice({self.name!r})"

View File

@@ -0,0 +1,26 @@
from __future__ import annotations
from typing import Optional
from sqlalchemy import ForeignKey
from sqlalchemy.orm import Mapped, mapped_column
from advanced_alchemy.base import BigIntAuditBase
class KosyncProgress(BigIntAuditBase):
    """Progress tracking for KOReader devices, keyed by document hash."""

    __tablename__ = "kosync_progress"

    # Owning user; row is removed when the user is deleted.
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="cascade"), nullable=False
    )
    # Matched book; row is removed when the book is deleted.
    book_id: Mapped[int] = mapped_column(
        ForeignKey("books.id", ondelete="cascade"), nullable=False
    )
    # Document hash reported by the device; used as the sync lookup key.
    document: Mapped[str] = mapped_column(nullable=False)
    # Opaque reading-position string as sent by KOReader.
    progress: Mapped[Optional[str]]
    # Fraction of the document read, as reported by the device.
    percentage: Mapped[Optional[float]]
    # Device name / identifier strings as reported by KOReader.
    device: Mapped[Optional[str]]
    device_id: Mapped[Optional[str]]

View File

@@ -0,0 +1,37 @@
import logging

from litestar.connection import ASGIConnection
from litestar.exceptions import NotAuthorizedException, PermissionDeniedException
from litestar.middleware import (
    AbstractAuthenticationMiddleware,
    AuthenticationResult,
    DefineMiddleware
)

from chitai.config import settings
from chitai.services.kosync_device import KosyncDeviceService
from chitai.services.user import UserService
logger = logging.getLogger(__name__)


class KosyncAuthenticationMiddleware(AbstractAuthenticationMiddleware):
    """Authenticate KOSync requests via a device API key.

    KOReader sends the device API key in the ``X-AUTH-USER`` header; the key
    is resolved to a registered device and then to the owning user.
    """

    async def authenticate_request(self, connection: ASGIConnection) -> AuthenticationResult:
        """Resolve the ``X-AUTH-USER`` API key to a user, or raise 401."""
        # retrieve the auth header
        api_key = connection.headers.get("X-AUTH-USER", None)
        if not api_key:
            raise NotAuthorizedException()
        try:
            db_session = settings.alchemy_config.provide_session(connection.app.state, connection.scope)
            user_service = UserService(db_session)
            device_service = KosyncDeviceService(db_session)
            # NOTE(review): an unknown key likely raises a repository
            # not-found error rather than PermissionDeniedException — confirm
            # it is not surfaced to clients as a 500.
            device = await device_service.get_by_api_key(api_key)
            user = await user_service.get(device.user_id)
            return AuthenticationResult(user=user, auth=None)
        except PermissionDeniedException as exc:
            # Log through the logging framework (was: print) and chain the
            # original cause for debuggability.
            logger.warning("KOSync authentication failed: %s", exc)
            raise NotAuthorizedException() from exc


kosync_api_key_auth = DefineMiddleware(KosyncAuthenticationMiddleware)

View File

@@ -0,0 +1,36 @@
from pydantic import BaseModel
class KosyncProgressUpdate(BaseModel):
"""Schema for uploading progress from KOReader."""
document: str
progress: str | None = None
percentage: float
device: str | None = None
device_id: str | None = None
class KosyncProgressRead(BaseModel):
    """Schema for reading progress to KOReader."""

    # Document hash the record was stored under.
    document: str
    # All remaining fields mirror what the device originally uploaded and
    # may be absent for older records.
    progress: str | None = None
    percentage: float | None = None
    device: str | None = None
    device_id: str | None = None
class KosyncDeviceRead(BaseModel):
    """Schema for reading device information."""

    id: int
    user_id: int
    # NOTE(review): the full API key is returned so the UI can display/copy
    # it; ensure this schema is only ever served to the owning user.
    api_key: str
    name: str
class KosyncDeviceCreate(BaseModel):
    """Schema for creating a new device."""

    # Human-readable device name; the API key is generated server-side.
    name: str

View File

@@ -6,3 +6,5 @@ from .author import AuthorService
from .tag import TagService
from .publisher import PublisherService
from .book_progress import BookProgressService
from .kosync_device import KosyncDeviceService
from .kosync_progress import KosyncProgressService

View File

@@ -44,11 +44,13 @@ from chitai.schemas.book import BooksCreateFromFiles
from chitai.services.filesystem_library import BookPathGenerator
from chitai.services.metadata_extractor import Extractor as MetadataExtractor
from chitai.services.utils import (
calculate_koreader_hash,
cleanup_empty_parent_directories,
delete_file,
move_dir_contents,
move_file,
save_image,
StreamingHasher,
)
@@ -172,10 +174,10 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
file_metadata = []
for file in files:
stats = await aios.stat(file)
file_size = stats.st_size
content_type, _ = mimetypes.guess_type(file)
file_hash = await calculate_koreader_hash(file)
filename = path_gen.generate_filename(data, Path(file.name))
@@ -183,7 +185,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
FileMetadata(
path=str(filename),
size=file_size,
hash="stub-hash", # TODO: implement file hashing to catch duplicates
hash=file_hash,
content_type=content_type,
)
)
@@ -540,10 +542,13 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
await file.seek(0)
path = parent / filename
path.parent.mkdir(parents=True, exist_ok=True)
hasher = StreamingHasher()
async with aiofiles.open(path, "wb") as dest:
# Read spooled file and save it to the local filesystem
while chunk := await file.read(CHUNK_SIZE):
await dest.write(chunk)
hasher.update(chunk)
stats = await aios.stat(path)
file_size = stats.st_size
@@ -552,7 +557,7 @@ class BookService(SQLAlchemyAsyncRepositoryService[Book]):
FileMetadata(
path=str(filename),
size=file_size,
hash="stub-hash", # TODO: implement file hashing to catch duplicates
hash=hasher.hexdigest(),
content_type=file.content_type,
)
)

View File

@@ -36,6 +36,8 @@ from chitai.services import (
TagService,
AuthorService,
PublisherService,
KosyncDeviceService,
KosyncProgressService,
)
from chitai.config import settings
from chitai.services.filters.book import (
@@ -340,3 +342,12 @@ def provide_optional_user(request: Request[m.User, Token, Any]) -> m.User | None
async def provide_user_via_basic_auth(request: Request[m.User, None, Any]) -> m.User:
return request.user
async def provide_user_via_kosync_auth(request: Request[m.User, None, Any]) -> m.User:
    """Return the user attached to the request by the KOSync API-key middleware."""
    return request.user
provide_kosync_device_service = create_service_provider(KosyncDeviceService)
provide_kosync_progress_service = create_service_provider(KosyncProgressService)

View File

@@ -138,7 +138,7 @@ class ProgressFilter(StatementFilter):
m.BookProgress.completed == False,
m.BookProgress.completed.is_(None),
),
m.BookProgress.progress > 0,
m.BookProgress.percentage > 0,
)
)
@@ -154,7 +154,6 @@ class ProgressFilter(StatementFilter):
@dataclass
class FileFilter(StatementFilter):
"""Filter books that are related to the given files."""
file_ids: list[int]
def append_to_statement(
@@ -166,6 +165,16 @@ class FileFilter(StatementFilter):
return super().append_to_statement(statement, model, *args, **kwargs)
@dataclass
class FileHashFilter(StatementFilter):
    """Filter books having a file whose hash matches one of the given hashes."""

    # FileMetadata.hash values (KOReader-style document hashes) to match.
    file_hashes: list[str]

    def append_to_statement(self, statement: StatementTypeT, model: type[ModelT], *args, **kwargs) -> StatementTypeT:
        # Restrict to books linked to at least one matching FileMetadata row.
        statement = statement.where(
            m.Book.files.any(m.FileMetadata.hash.in_(self.file_hashes))
        )
        return super().append_to_statement(statement, model, *args, **kwargs)
@dataclass
class CustomOrderBy(StatementFilter):

View File

@@ -0,0 +1,35 @@
from __future__ import annotations
import secrets
from chitai.database.models.kosync_device import KosyncDevice
from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, ModelDictT, schema_dump
from advanced_alchemy.repository import SQLAlchemyAsyncRepository
class KosyncDeviceService(SQLAlchemyAsyncRepositoryService[KosyncDevice]):
    """Service for managing KOReader devices.

    Devices authenticate with a server-generated random hex API key; this
    service is the only place keys are created or rotated.
    """

    # Number of random bytes per key; token_hex doubles this in characters.
    API_KEY_LENGTH_IN_BYTES = 8

    class Repo(SQLAlchemyAsyncRepository[KosyncDevice]):
        """Repository for KosyncDevice entities."""

        model_type = KosyncDevice

    repository_type = Repo

    async def create(self, data: ModelDictT[KosyncDevice], **kwargs) -> KosyncDevice:
        """Create a device, stamping it with a freshly generated API key."""
        payload = schema_dump(data)
        payload['api_key'] = self._generate_api_key()
        return await super().create(payload, **kwargs)

    async def get_by_api_key(self, api_key: str) -> KosyncDevice:
        """Return the device owning *api_key* (raises if none matches)."""
        return await self.get_one(KosyncDevice.api_key == api_key)

    async def regenerate_api_key(self, device_id: int) -> KosyncDevice:
        """Rotate the API key of the device identified by *device_id*."""
        target = await self.get(device_id)
        target.api_key = self._generate_api_key()
        return await self.update(target)

    def _generate_api_key(self) -> str:
        """Return a new cryptographically random hex key."""
        return secrets.token_hex(self.API_KEY_LENGTH_IN_BYTES)

View File

@@ -0,0 +1,51 @@
from __future__ import annotations
from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService
from advanced_alchemy.repository import SQLAlchemyAsyncRepository
from chitai.database.models.kosync_progress import KosyncProgress
class KosyncProgressService(SQLAlchemyAsyncRepositoryService[KosyncProgress]):
    """Service for managing KOReader sync progress."""

    class Repo(SQLAlchemyAsyncRepository[KosyncProgress]):
        """Repository for KosyncProgress entities."""

        model_type = KosyncProgress

    repository_type = Repo

    async def get_by_document_hash(self, user_id: int, document: str) -> KosyncProgress | None:
        """Get progress for a specific document and user."""
        return await self.get_one_or_none(
            KosyncProgress.user_id == user_id,
            KosyncProgress.document == document,
        )

    async def upsert_progress(
        self,
        user_id: int,
        book_id: int,
        document: str,
        progress: str | None,
        percentage: float,
        device: str | None = None,
        device_id: str | None = None,
    ) -> KosyncProgress:
        """Create or update progress for a document.

        NOTE(review): this is a read-then-write upsert; two concurrent syncs
        for the same (user, document) pair can race and insert duplicate
        rows — consider a unique constraint plus a database-native upsert.
        """
        existing = await self.get_by_document_hash(user_id, document)
        data = {
            "user_id": user_id,
            "book_id": book_id,
            "document": document,
            "progress": progress,
            "percentage": percentage,
            "device": device,
            "device_id": device_id,
        }
        if existing:
            return await self.update(data, item_id=existing.id)
        return await self.create(data)

View File

@@ -1,9 +1,15 @@
# src/chitai/services/utils.py
# Standard library
from __future__ import annotations
import hashlib
from pathlib import Path
import shutil
from typing import BinaryIO
from typing import TYPE_CHECKING, BinaryIO
if TYPE_CHECKING:
from hashlib import _Hash
# Third-party libraries
import PIL
@@ -12,6 +18,120 @@ import aiofiles
import aiofiles.os as aios
from litestar.datastructures import UploadFile
##################################
# KOReader file hash utilities #
##################################
# KOReader partial MD5 constants
# These match KOReader's partial MD5 implementation for document identification
# KOReader samples 1024 bytes at specific offsets calculated using 32-bit left shift.
# The shift wrapping behavior (shift & 0x1F) causes i=-1 to produce offset 0.
# Offsets: 0, 1024, 4096, 16384, 65536, 262144, 1048576, ...
KO_STEP = 1024
KO_SAMPLE_SIZE = 1024
KO_INDICES = range(-1, 11) # -1 to 10 inclusive
def _lshift32(val: int, shift: int) -> int:
"""
32-bit left shift matching LuaJIT's bit.lshift behavior.
LuaJIT masks the shift amount to 5 bits (0-31) and performs 32-bit arithmetic.
This causes negative shifts to wrap: shift=-2 becomes shift=30, and
1024 << 30 overflows 32 bits to produce 0.
"""
val &= 0xFFFFFFFF
shift &= 0x1F
return (val << shift) & 0xFFFFFFFF
def _get_koreader_offsets() -> list[int]:
    """Return every byte offset KOReader samples for its partial MD5."""
    offsets = []
    for index in KO_INDICES:
        offsets.append(_lshift32(KO_STEP, 2 * index))
    return offsets
def _partial_md5_from_chunk(
    chunk: bytes,
    hasher: hashlib._Hash,
    offsets: list[int],
    chunk_start: int,
) -> None:
    """
    Update the partial-MD5 hasher with sampled bytes from one chunk.

    KOReader hashes only 1024-byte samples at fixed offsets instead of the
    whole file. Every offset that falls inside this chunk contributes up to
    ``KO_SAMPLE_SIZE`` bytes, truncated at the chunk's end.

    NOTE(review): a sample whose offset lies near the chunk's end is NOT
    continued into the next chunk; callers must feed chunks whose size is a
    multiple of 1024 so samples never straddle a boundary — TODO confirm.

    Args:
        chunk: The current chunk of file data.
        hasher: The MD5 hasher to update.
        offsets: Byte offsets (into the whole file) to sample.
        chunk_start: Absolute file position where this chunk begins.
    """
    chunk_end = chunk_start + len(chunk)
    for offset in offsets:
        if not (chunk_start <= offset < chunk_end):
            continue
        begin = offset - chunk_start
        # Slicing truncates automatically at the chunk's end.
        hasher.update(chunk[begin : begin + KO_SAMPLE_SIZE])
async def calculate_koreader_hash(file_path: Path) -> str:
    """
    Calculate the KOReader-compatible partial MD5 hash of a file.

    Instead of hashing the whole file, KOReader samples 1024 bytes at the
    offsets 1024 << (2*i) for i in -1..10 (32-bit shift overflow makes
    i=-1 yield offset 0): 0, 1024, 4096, 16384, 65536, 262144, 1048576, ...
    This keeps identification of large ebooks fast.

    NOTE(review): assumes each read() returns a full 256 KiB chunk until
    EOF so samples never straddle a chunk boundary — true for regular
    files; confirm if other file-like sources are ever passed.

    Args:
        file_path: Path to the file to hash.

    Returns:
        The hexadecimal MD5 digest string.
    """
    md5 = hashlib.md5()
    sample_offsets = _get_koreader_offsets()
    read_size = 262144  # 256 KiB per read; a multiple of the 1024-byte sample size
    position = 0
    async with aiofiles.open(file_path, "rb") as stream:
        while data := await stream.read(read_size):
            _partial_md5_from_chunk(data, md5, sample_offsets, position)
            position += len(data)
    return md5.hexdigest()
class StreamingHasher:
    """
    Incrementally compute the KOReader partial MD5 while data streams by.

    Feed chunks in file order via :meth:`update`; the final digest matches
    what :func:`calculate_koreader_hash` would produce, without having to
    re-read the file after writing it.
    """

    def __init__(self) -> None:
        # Absolute file position of the next chunk to be fed in.
        self.position = 0
        self.offsets = _get_koreader_offsets()
        self.hasher = hashlib.md5()

    def update(self, chunk: bytes) -> None:
        """Feed the next sequential chunk of file data into the hash."""
        _partial_md5_from_chunk(chunk, self.hasher, self.offsets, self.position)
        self.position += len(chunk)

    def hexdigest(self) -> str:
        """Return the hex digest of everything sampled so far."""
        return self.hasher.hexdigest()
##################################
# Filesystem related utilities #
##################################

View File

@@ -202,6 +202,26 @@ async def bookshelf_service(
yield service
@pytest.fixture
async def kosync_progress_service(
    sessionmaker: async_sessionmaker[AsyncSession],
) -> AsyncGenerator[services.KosyncProgressService, None]:
    """Yield a KosyncProgressService bound to a fresh database session."""
    async with (
        sessionmaker() as session,
        services.KosyncProgressService.new(session) as service,
    ):
        yield service
@pytest.fixture
async def kosync_device_service(
    sessionmaker: async_sessionmaker[AsyncSession],
) -> AsyncGenerator[services.KosyncDeviceService, None]:
    """Yield a KosyncDeviceService bound to a fresh database session."""
    async with (
        sessionmaker() as session,
        services.KosyncDeviceService.new(session) as service,
    ):
        yield service
# Data fixtures

View File

@@ -0,0 +1,98 @@
"""Tests for KOReader-compatible file hash generation."""
import pytest
from httpx import AsyncClient
from pathlib import Path
# Known KOReader hashes for test files.
# Maps display filename -> on-disk path, the expected KOReader
# partial-MD5 hash, and the MIME type sent with the upload.
TEST_FILES = {
    "Moby Dick; Or, The Whale - Herman Melville.epub": {
        "path": Path("tests/data_files/Moby Dick; Or, The Whale - Herman Melville.epub"),
        "hash": "ceeef909ec65653ba77e1380dff998fb",
        "content_type": "application/epub+zip",
    },
    "Calculus Made Easy - Silvanus Thompson.pdf": {
        "path": Path("tests/data_files/Calculus Made Easy - Silvanus Thompson.pdf"),
        "hash": "ace67d512efd1efdea20f3c2436b6075",
        "content_type": "application/pdf",
    },
}
@pytest.mark.parametrize("book_name", list(TEST_FILES))
async def test_upload_book_generates_correct_hash(
    authenticated_client: AsyncClient,
    book_name: str,
) -> None:
    """Uploading a book stores the expected KOReader-compatible hash.

    Idiom fix: parametrize over a plain list of names instead of the
    1-tuple form ``(("book_name",), [(n,) for n in TEST_FILES.keys()])``.
    """
    book_info = TEST_FILES[book_name]
    file_content = book_info["path"].read_bytes()
    files = [("files", (book_name, file_content, book_info["content_type"]))]
    data = {"library_id": "1"}

    response = await authenticated_client.post(
        "/books?library_id=1",
        files=files,
        data=data,
    )

    assert response.status_code == 201
    book_data = response.json()
    # Exactly one file should be attached, carrying the known hash.
    assert len(book_data["files"]) == 1
    file_metadata = book_data["files"][0]
    assert "hash" in file_metadata
    assert file_metadata["hash"] == book_info["hash"]
async def test_add_file_to_book_generates_correct_hash(
    authenticated_client: AsyncClient,
) -> None:
    """Adding a second file to an existing book hashes that file correctly."""
    epub_info = TEST_FILES["Moby Dick; Or, The Whale - Herman Melville.epub"]
    pdf_info = TEST_FILES["Calculus Made Easy - Silvanus Thompson.pdf"]

    # Create a book containing only the EPUB.
    create_response = await authenticated_client.post(
        "/books?library_id=1",
        files=[
            ("files", (epub_info["path"].name, epub_info["path"].read_bytes(), epub_info["content_type"])),
        ],
        data={"library_id": "1"},
    )
    assert create_response.status_code == 201
    book_id = create_response.json()["id"]

    # Attach the PDF to the same book.
    add_response = await authenticated_client.post(
        f"/books/{book_id}/files",
        files=[
            ("data", (pdf_info["path"].name, pdf_info["path"].read_bytes(), pdf_info["content_type"])),
        ],
    )
    assert add_response.status_code == 201
    updated_book = add_response.json()

    # Both files must be present, each with its known KOReader hash.
    assert len(updated_book["files"]) == 2
    for file_metadata in updated_book["files"]:
        assert "hash" in file_metadata
    epub_file = next(f for f in updated_book["files"] if f["path"].endswith(".epub"))
    pdf_file = next(f for f in updated_book["files"] if f["path"].endswith(".pdf"))
    assert epub_file["hash"] == epub_info["hash"]
    assert pdf_file["hash"] == pdf_info["hash"]

954
backend/uv.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,43 @@
import { command, form, getRequestEvent, query } from '$app/server';
import { createDeviceSchema, type Device } from '$lib/schema/device';
import { error } from '@sveltejs/kit';
import z from 'zod';
/** Fetch all KOReader devices belonging to the authenticated user. */
export const listDevices = query(async (): Promise<Device[]> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.get(`/devices`);
	// Fixed typo in the user-facing message ("unkown" -> "unknown").
	if (!response.ok) error(500, 'An unknown error occurred');
	const deviceResult = await response.json();
	return deviceResult.items;
});
/** Create a device from validated form data and return the server record. */
export const createDevice = form(createDeviceSchema, async (data): Promise<Device> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.post(`/devices`, data);
	if (!response.ok) {
		error(500, 'An unknown error occurred');
	}
	const created: Device = await response.json();
	return created;
});
/**
 * Ask the server to rotate the API key for the given device and return
 * the updated device record.
 *
 * NOTE(review): this issues a GET for a mutating operation — confirm the
 * backend registers `/devices/{id}/regenerate` as GET before changing.
 */
export const regenerateDeviceApiKey = command(z.string(), async (deviceId): Promise<Device> => {
	const { locals } = getRequestEvent();
	const response = await locals.api.get(`/devices/${deviceId}/regenerate`);
	if (!response.ok) error(500, 'An unknown error occurred');
	return await response.json();
})
/** Delete the device with the given id; resolves with no value on success. */
export const deleteDevice = command(z.string(), async (deviceId): Promise<void> => {
	const { locals } = getRequestEvent();
	const res = await locals.api.delete(`/devices/${deviceId}`);
	if (!res.ok) {
		error(500, 'An unknown error occurred');
	}
});

View File

@@ -0,0 +1,50 @@
<!--
  Device creation form.

  Submits through the `createDevice` remote form action. On success the
  form resets, a toast is shown, and the optional `onSuccess` callback is
  invoked (e.g. so a parent dialog can close itself).
-->
<script lang="ts">
	import { createDevice } from '$lib/api/device.remote';
	import * as Field from '$lib/components/ui/field/index.js';
	import { Input } from '$lib/components/ui/input/index.js';
	import { Button } from '$lib/components/ui/button/index.js';
	import { toast } from 'svelte-sonner';

	interface Props {
		// Called after a device has been created successfully.
		onSuccess?: () => void;
	}

	let { onSuccess }: Props = $props();
</script>

<form
	{...createDevice.enhance(async ({ form, submit }) => {
		try {
			await submit();
			// Validation problems are rendered inline below the field; stop here.
			const issues = createDevice.fields.allIssues();
			if (issues && issues.length > 0) {
				return;
			}
			// Read the name before reset() clears the field state.
			const deviceName = createDevice.fields.name.value();
			form.reset();
			toast.success(`Device '${deviceName}' created`);
			onSuccess?.();
		} catch (error) {
			console.error('Failed to create device: ', error);
			toast.error('Failed to create device');
		}
	})}
	class="flex flex-col gap-3"
>
	<Field.Set>
		<Field.Group>
			<Field.Field>
				<Field.Label for="name">Device name</Field.Label>
				<Input {...createDevice.fields.name.as('text')} placeholder="e.g. Kindle Paperwhite" />
				{#each createDevice.fields.name.issues() ?? [] as issue}
					<Field.Error>{issue.message}</Field.Error>
				{/each}
			</Field.Field>
		</Field.Group>
	</Field.Set>
	<Button type="submit" class="ml-auto w-24">Create</Button>
</form>

View File

@@ -0,0 +1,8 @@
import { z } from 'zod';
import type { components } from './openapi/schema';
// Device shape generated from the backend OpenAPI schema (KosyncDeviceRead).
export type Device = components['schemas']['KosyncDeviceRead']

// Client-side validation for the create-device form.
export const createDeviceSchema = z.object({
	name: z.string().min(1, 'Name cannot be empty')
})

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,5 @@
<script lang="ts">
import * as Sidebar from '$lib/components/ui/sidebar/index.js';
import { page } from '$app/state';
import Separator from '$lib/components/ui/separator/separator.svelte';
let { children } = $props();
const items = [
@@ -12,40 +10,29 @@
{
title: 'Libraries',
url: '/settings/libraries'
},
{
title: 'Devices',
url: '/settings/devices'
}
];
</script>
<div class="flex h-[calc(100vh-var(--header-height)-2rem)] flex-col">
<h1 class="text-xl font-bold">Settings</h1>
<Separator class="my-4" />
<div class="flex flex-1 gap-6 overflow-hidden">
<div class="flex gap-8">
<Sidebar.Provider class="shrink-0">
<Sidebar.Inset>
<Sidebar.Content>
<Sidebar.Group class="flex flex-col gap-1">
<div class="mt-6 flex flex-1 gap-8 overflow-hidden">
<nav class="flex w-48 shrink-0 flex-col gap-1">
{#each items as item}
<Sidebar.MenuItem class="w-48">
<Sidebar.MenuButton
class="rounded hover:bg-muted {page.url.pathname.endsWith(item.url)
<a
href={item.url}
class="rounded-md px-3 py-2 text-sm font-medium transition-colors hover:bg-muted {page.url.pathname.endsWith(item.url)
? 'bg-muted'
: ''}"
: 'text-muted-foreground'}"
>
{#snippet child({ props })}
<a href={item.url} {...props}>
<span class="text-base">{item.title}</span>
{item.title}
</a>
{/snippet}
</Sidebar.MenuButton>
</Sidebar.MenuItem>
{/each}
</Sidebar.Group>
</Sidebar.Content>
</Sidebar.Inset>
</Sidebar.Provider>
</div>
<Separator orientation="vertical" class="self-stretch" />
<div class="mx-2 mt-2 flex-1 overflow-auto">{@render children()}</div>
</nav>
<div class="flex-1 overflow-auto">{@render children()}</div>
</div>
</div>

View File

@@ -0,0 +1,9 @@
import { listDevices } from "$lib/api/device.remote";
/** Load the current user's devices for the settings page. */
export async function load() {
	return {
		devices: await listDevices()
	};
}

View File

@@ -0,0 +1,240 @@
<script lang="ts">
import * as Table from '$lib/components/ui/table/index.js';
import * as Card from '$lib/components/ui/card/index.js';
import * as Dialog from '$lib/components/ui/dialog/index.js';
import * as DropdownMenu from '$lib/components/ui/dropdown-menu/index.js';
import * as AlertDialog from '$lib/components/ui/alert-dialog/index.js';
import * as Empty from '$lib/components/ui/empty/index.js';
import { Button, buttonVariants } from '$lib/components/ui/button/index.js';
import {
EllipsisVertical,
Plus,
Eye,
EyeOff,
Copy,
RefreshCw,
Trash2,
Smartphone
} from '@lucide/svelte';
import DeviceCreateForm from '$lib/components/forms/device-create-form.svelte';
import { deleteDevice, regenerateDeviceApiKey } from '$lib/api/device.remote';
import { toast } from 'svelte-sonner';
import { invalidateAll } from '$app/navigation';
import type { Device } from '$lib/schema/device';
let { data } = $props();
let createDialogOpen = $state(false);
let visibleApiKeys = $state<Set<string>>(new Set());
let deleteConfirmDevice = $state<Device | null>(null);
let regenerateConfirmDevice = $state<Device | null>(null);
/** Show or hide the API key for one device in the table. */
function toggleApiKeyVisibility(deviceId: string) {
	const next = new Set(visibleApiKeys);
	if (next.has(deviceId)) {
		next.delete(deviceId);
	} else {
		next.add(deviceId);
	}
	// Reassign so Svelte's $state tracking notices the change.
	visibleApiKeys = next;
}
/** Mask an API key with bullet characters, capped at 32 dots. */
function maskApiKey(apiKey: string): string {
	const dotCount = Math.min(apiKey.length, 32);
	return '•'.repeat(dotCount);
}
/** Copy `value` to the clipboard, surfacing the outcome via a toast. */
async function copyToClipboard(value: string) {
	try {
		await navigator.clipboard.writeText(value);
		toast.success('API key copied to clipboard');
	} catch {
		toast.error('Failed to copy to clipboard');
	}
}
/**
 * Delete `device` on the server, then close the confirm dialog and
 * refresh the device list. Any failure (including during refresh) is
 * reported as a single error toast.
 */
async function handleDelete(device: Device) {
	try {
		await deleteDevice(String(device.id));
		toast.success(`Device '${device.name}' deleted`);
		deleteConfirmDevice = null;
		await invalidateAll();
	} catch {
		toast.error('Failed to delete device');
	}
}
/**
 * Rotate the API key for `device`, then close the confirm dialog and
 * refresh the device list so the new key is displayed.
 */
async function handleRegenerate(device: Device) {
	try {
		await regenerateDeviceApiKey(String(device.id));
		toast.success(`API key regenerated for '${device.name}'`);
		regenerateConfirmDevice = null;
		await invalidateAll();
	} catch {
		toast.error('Failed to regenerate API key');
	}
}
/**
 * Close the create dialog and refresh the device list.
 * Awaits invalidateAll() for consistency with handleDelete/handleRegenerate
 * (previously fire-and-forget); async () => Promise<void> remains
 * assignable to the `onSuccess?: () => void` prop.
 */
async function handleDeviceCreated() {
	createDialogOpen = false;
	await invalidateAll();
}
</script>
<div class="mb-4 flex items-center justify-between">
<div>
<h2 class="text-lg font-semibold">Devices</h2>
<p class="text-sm text-muted-foreground">Manage your KOReader devices</p>
</div>
<Dialog.Root bind:open={createDialogOpen}>
<Dialog.Trigger class={buttonVariants({ variant: 'outline' })}>
<Plus />
Add Device
</Dialog.Trigger>
<Dialog.Content>
<Dialog.Header>
<Dialog.Title>Add a new device</Dialog.Title>
<Dialog.Description>
Create a device to sync your KOReader reading progress.
</Dialog.Description>
</Dialog.Header>
<DeviceCreateForm onSuccess={handleDeviceCreated} />
</Dialog.Content>
</Dialog.Root>
</div>
{#if data.devices.length === 0}
<Empty.Root class="py-12">
<Empty.Header>
<Empty.Media variant="icon">
<Smartphone />
</Empty.Media>
<Empty.Title>No devices</Empty.Title>
<Empty.Description>
Add a device to sync your KOReader reading progress.
</Empty.Description>
</Empty.Header>
<Empty.Content>
<Button onclick={() => (createDialogOpen = true)}>
<Plus />
Add Device
</Button>
</Empty.Content>
</Empty.Root>
{:else}
<Card.Root>
<Card.Content class="p-0">
<Table.Root>
<Table.Header>
<Table.Row>
<Table.Head class="pl-4">Name</Table.Head>
<Table.Head>API Key</Table.Head>
<Table.Head class="w-16"></Table.Head>
</Table.Row>
</Table.Header>
<Table.Body>
{#each data.devices as device}
{@const isVisible = visibleApiKeys.has(String(device.id))}
<Table.Row class="h-14">
<Table.Cell class="pl-4 font-medium">
{device.name}
</Table.Cell>
<Table.Cell>
<div class="flex items-center gap-2">
<code class="rounded bg-muted px-2 py-1 font-mono text-sm">
{isVisible ? device.api_key : maskApiKey(device.api_key)}
</code>
<Button
variant="ghost"
size="icon"
class="h-8 w-8"
onclick={() => toggleApiKeyVisibility(String(device.id))}
>
{#if isVisible}
<EyeOff class="h-4 w-4" />
{:else}
<Eye class="h-4 w-4" />
{/if}
</Button>
<Button
variant="ghost"
size="icon"
class="h-8 w-8"
onclick={() => copyToClipboard(device.api_key)}
>
<Copy class="h-4 w-4" />
</Button>
</div>
</Table.Cell>
<Table.Cell class="text-center">
<DropdownMenu.Root>
<DropdownMenu.Trigger>
<Button variant="ghost" size="icon" class="h-8 w-8">
<EllipsisVertical class="h-4 w-4" />
</Button>
</DropdownMenu.Trigger>
<DropdownMenu.Content align="end">
<DropdownMenu.Item onclick={() => (regenerateConfirmDevice = device)}>
<RefreshCw class="mr-2 h-4 w-4" />
Regenerate API Key
</DropdownMenu.Item>
<DropdownMenu.Separator />
<DropdownMenu.Item
class="text-destructive focus:text-destructive"
onclick={() => (deleteConfirmDevice = device)}
>
<Trash2 class="mr-2 h-4 w-4" />
Delete
</DropdownMenu.Item>
</DropdownMenu.Content>
</DropdownMenu.Root>
</Table.Cell>
</Table.Row>
{/each}
</Table.Body>
</Table.Root>
</Card.Content>
</Card.Root>
{/if}
<!-- Delete Confirmation Dialog -->
<AlertDialog.Root open={deleteConfirmDevice !== null}>
<AlertDialog.Content>
<AlertDialog.Header>
<AlertDialog.Title>Delete device?</AlertDialog.Title>
<AlertDialog.Description>
Are you sure you want to delete "{deleteConfirmDevice?.name}"? This action cannot be undone.
The device will no longer be able to sync reading progress.
</AlertDialog.Description>
</AlertDialog.Header>
<AlertDialog.Footer>
<AlertDialog.Cancel onclick={() => (deleteConfirmDevice = null)}>Cancel</AlertDialog.Cancel>
<AlertDialog.Action
class={buttonVariants({ variant: 'destructive' })}
onclick={() => deleteConfirmDevice && handleDelete(deleteConfirmDevice)}
>
Delete
</AlertDialog.Action>
</AlertDialog.Footer>
</AlertDialog.Content>
</AlertDialog.Root>
<!-- Regenerate API Key Confirmation Dialog -->
<AlertDialog.Root open={regenerateConfirmDevice !== null}>
<AlertDialog.Content>
<AlertDialog.Header>
<AlertDialog.Title>Regenerate API key?</AlertDialog.Title>
<AlertDialog.Description>
This will invalidate the current API key for "{regenerateConfirmDevice?.name}".
You will need to update the key in your KOReader device settings.
</AlertDialog.Description>
</AlertDialog.Header>
<AlertDialog.Footer>
<AlertDialog.Cancel onclick={() => (regenerateConfirmDevice = null)}>Cancel</AlertDialog.Cancel>
<AlertDialog.Action
onclick={() => regenerateConfirmDevice && handleRegenerate(regenerateConfirmDevice)}
>
Regenerate
</AlertDialog.Action>
</AlertDialog.Footer>
</AlertDialog.Content>
</AlertDialog.Root>

View File

@@ -8,33 +8,39 @@
const libraryState = getLibraryState();
</script>
<h1 class="text-lg font-semibold">Library Settings</h1>
<div class="mb-4 flex items-center justify-between">
<div>
<h2 class="text-lg font-semibold">Libraries</h2>
<p class="text-sm text-muted-foreground">Manage your libraries</p>
<Card.Root class="mt-6">
<Card.Content>
<Card.Header class="mb-2 flex items-center">
<Card.Title>Libraries</Card.Title>
<Button
variant="outline"
class="ml-auto"
onclick={() => libraryState.openLibraryCreateDialog()}
>
</div>
<Button variant="outline" onclick={() => libraryState.openLibraryCreateDialog()}>
<Plus />
Add Library
</Button>
</Card.Header>
</div>
<Card.Root>
<Card.Content class="p-0">
<Table.Root>
<Table.Header>
<Table.Row>
<Table.Head class="w-16 pl-4"></Table.Head>
<Table.Head>Name</Table.Head>
<Table.Head class="w-16"></Table.Head>
</Table.Row>
</Table.Header>
<Table.Body>
{#each libraryState.libraries as library}
<Table.Row class="h-16">
<Table.Cell class="w-16 text-center text-lg font-semibold">{library.name[0]}</Table.Cell
<Table.Row class="h-14">
<Table.Cell class="w-16 pl-4 text-center text-lg font-semibold"
>{library.name[0]}</Table.Cell
>
<Table.Cell class="font-medium"
><a href={`/library/${library.id}`} class="hover:underline">{library.name}</a
></Table.Cell
>
<Table.Cell class="w-16 text-center"><EllipsisVertical class="scale-75" /></Table.Cell>
<Table.Cell class="font-medium">
<a href={`/library/${library.id}`} class="hover:underline">{library.name}</a>
</Table.Cell>
<Table.Cell class="w-16 text-center">
<EllipsisVertical class="scale-75" />
</Table.Cell>
</Table.Row>
{/each}
</Table.Body>