feat: add images store

- Image queue with concurrent processing
- File validation (format, size)
- Processing status tracking
- Pipeline override per image
- Batch reprocessing

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
2026-05-13 18:12:56 -04:00
parent d02c8d281a
commit 0b1cfc5666

View File

@@ -0,0 +1,219 @@
import type { ImageEntry, Dimensions, Device, PipelineConfig } from '$lib/types';
import { CONSTRAINTS, isSupportedFormat, DEFAULT_PIPELINE_CONFIG } from '$lib/types';
import { processImageWithPipeline } from '$lib/processing/pipeline';
/**
 * Factory for the image store: holds the list of uploaded images, a FIFO
 * queue of processing jobs, and derived per-status counts. Built as a
 * closure over Svelte 5 runes; a single instance is exported at module end.
 */
function createImagesStore() {
// Reactive list of all image entries (pending, processing, complete, error).
let images = $state<ImageEntry[]>([]);
// FIFO of jobs waiting for a free processing slot.
const processingQueue: Array<{ id: string; file: File; device: Device }> = [];
// Re-entrancy guard for the synchronous drain loop in processQueue.
let isProcessingQueue = false;
// Upper bound on simultaneously running pipeline jobs.
const MAX_CONCURRENT = 2;
// Number of jobs currently in flight (incremented on start, decremented on settle).
let activeProcessing = 0;
// Derived per-status counts, kept in sync with `images` automatically.
const pendingCount = $derived(images.filter((img) => img.status === 'pending').length);
const processingCount = $derived(images.filter((img) => img.status === 'processing').length);
const completeCount = $derived(images.filter((img) => img.status === 'complete').length);
const errorCount = $derived(images.filter((img) => img.status === 'error').length);
const hasImages = $derived(images.length > 0);
// Subset of entries whose processing finished successfully.
const completedImages = $derived(images.filter((img) => img.status === 'complete'));
/**
 * Pump the job queue: start queued jobs until the MAX_CONCURRENT cap is hit.
 * The loop itself is synchronous (no awaits), so isProcessingQueue only
 * guards against re-entrant calls made from within a launch. Each settled
 * job re-invokes the pump so remaining work keeps draining.
 */
async function processQueue(): Promise<void> {
  if (isProcessingQueue) return;
  isProcessingQueue = true;
  // Launch while there is both queued work and a free slot.
  while (activeProcessing < MAX_CONCURRENT && processingQueue.length > 0) {
    const next = processingQueue.shift();
    if (!next) break;
    activeProcessing += 1;
    // Fire-and-forget: processImageEntry records its own success/failure
    // in the store, so only the slot bookkeeping happens here.
    processImageEntry(next.id, next.file, next.device).finally(() => {
      activeProcessing -= 1;
      // A slot just freed; resume draining if anything is still queued.
      if (processingQueue.length > 0) {
        processQueue();
      }
    });
  }
  isProcessingQueue = false;
}
/**
 * Validate and enqueue a batch of files for processing.
 * Files failing validation (format or size) become immediate 'error'
 * entries; the rest become 'pending' entries and are queued, after which
 * the queue pump is kicked.
 *
 * @param files - files from a drop event or file input
 * @param device - target device forwarded to the processing pipeline
 */
async function addImages(files: FileList | File[], device: Device): Promise<void> {
  for (const file of Array.from(files)) {
    // First failing check wins: format, then size.
    const validationError = !isSupportedFormat(file)
      ? 'Unsupported format. Use JPG, PNG, or WebP'
      : file.size > CONSTRAINTS.MAX_FILE_SIZE_BYTES
        ? `File exceeds ${CONSTRAINTS.MAX_FILE_SIZE_MB}MB limit`
        : null;
    if (validationError !== null) {
      images = [...images, createErrorEntry(file, validationError)];
      continue;
    }
    const entry = createPendingEntry(file);
    images = [...images, entry];
    processingQueue.push({ id: entry.id, file, device });
  }
  // Deliberately not awaited: processing continues in the background.
  void processQueue();
}
/**
 * Build a terminal 'error' entry for a file that failed validation.
 * No processing fields are populated; only the error message is set.
 */
function createErrorEntry(file: File, error: string): ImageEntry {
  const entry: ImageEntry = {
    id: crypto.randomUUID(),
    file,
    filename: file.name,
    originalDimensions: null,
    originalDataUrl: null,
    processedBlob: null,
    processedDataUrl: null,
    status: 'error',
    error,
    pipelineOverride: null
  };
  return entry;
}
/**
 * Build a fresh 'pending' entry for a file that passed validation and is
 * about to be queued for processing.
 */
function createPendingEntry(file: File): ImageEntry {
  const entry: ImageEntry = {
    id: crypto.randomUUID(),
    file,
    filename: file.name,
    originalDimensions: null,
    originalDataUrl: null,
    processedBlob: null,
    processedDataUrl: null,
    status: 'pending',
    error: null,
    pipelineOverride: null
  };
  return entry;
}
/**
 * Run the full flow for one queued image: read the original as a data URL,
 * record its dimensions, then run the conversion pipeline and store the
 * result. Any failure marks the entry 'error' with a message.
 *
 * @param id - store id of the entry (created via createPendingEntry)
 * @param file - the original uploaded file
 * @param device - target device forwarded to the pipeline
 */
async function processImageEntry(id: string, file: File, device: Device): Promise<void> {
  // Fix: skip entries removed from the store while waiting in the queue.
  // Previously the pipeline still ran and every updateImage call was a
  // silent no-op — pure wasted work.
  if (!getImage(id)) return;
  updateImage(id, { status: 'processing' });
  try {
    const originalDataUrl = await readFileAsDataUrl(file);
    const originalDimensions = await getImageDimensions(originalDataUrl);
    updateImage(id, { originalDataUrl, originalDimensions });
    // A per-image override wins over the global default pipeline config.
    const img = getImage(id);
    const pipelineConfig = img?.pipelineOverride ?? DEFAULT_PIPELINE_CONFIG;
    const { blob, dataUrl } = await processImageWithPipeline(file, device, pipelineConfig);
    updateImage(id, {
      processedBlob: blob,
      processedDataUrl: dataUrl,
      status: 'complete',
      error: null
    });
  } catch (err) {
    // err is unknown; narrow before reading .message.
    const errorMessage = err instanceof Error ? err.message : 'Processing failed';
    updateImage(id, { status: 'error', error: errorMessage });
  }
}
/**
 * Wrap FileReader's callback API in a promise that resolves with the file's
 * contents as a base64 data URL, rejecting on any read error.
 */
function readFileAsDataUrl(file: File): Promise<string> {
  return new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.onload = () => {
      // readAsDataURL yields a string result on success.
      resolve(fileReader.result as string);
    };
    fileReader.onerror = () => {
      reject(new Error('Failed to read file'));
    };
    fileReader.readAsDataURL(file);
  });
}
/**
 * Decode an image from a data URL and report its intrinsic pixel size,
 * rejecting if the browser cannot decode it.
 */
function getImageDimensions(dataUrl: string): Promise<Dimensions> {
  return new Promise((resolve, reject) => {
    const probe = new Image();
    probe.onload = () => {
      const { naturalWidth: width, naturalHeight: height } = probe;
      resolve({ width, height });
    };
    probe.onerror = () => {
      reject(new Error('Failed to load image'));
    };
    probe.src = dataUrl;
  });
}
/**
 * Shallow-merge `updates` into the entry with the given id. Reassigns a new
 * array so the $state rune picks up the change; no-op if the id is absent.
 */
function updateImage(id: string, updates: Partial<ImageEntry>): void {
  images = images.map((entry) => {
    if (entry.id !== id) return entry;
    return { ...entry, ...updates };
  });
}
function removeImage(id: string): void {
images = images.filter((img) => img.id !== id);
}
function clearAll(): void {
images = [];
}
async function reprocessAll(device: Device): Promise<void> {
const toReprocess = images.filter((img) => img.file && img.status !== 'error');
for (const img of toReprocess) {
updateImage(img.id, {
status: 'processing',
processedBlob: null,
processedDataUrl: null,
error: null
});
processImageEntry(img.id, img.file, device);
}
}
/** Look up an entry by id; returns undefined when no entry matches. */
function getImage(id: string): ImageEntry | undefined {
return images.find((img) => img.id === id);
}
/**
 * Attach a per-image pipeline config (or clear it with null). Only stores
 * the override — it takes effect on the next (re)processing run; callers
 * must trigger reprocessImage themselves if they want immediate effect.
 */
function setPipelineOverride(id: string, config: PipelineConfig | null): void {
updateImage(id, { pipelineOverride: config });
}
async function reprocessImage(id: string, device: Device): Promise<void> {
const img = getImage(id);
if (!img || !img.file) return;
updateImage(id, {
status: 'processing',
processedBlob: null,
processedDataUrl: null,
error: null
});
processImageEntry(id, img.file, device);
}
// Public store surface: read-only reactive getters plus action methods.
// Getters (rather than plain properties) keep $state/$derived reactivity
// intact when consumed from components.
return {
get images() {
return images;
},
get pendingCount() {
return pendingCount;
},
get processingCount() {
return processingCount;
},
get completeCount() {
return completeCount;
},
get errorCount() {
return errorCount;
},
get hasImages() {
return hasImages;
},
get completedImages() {
return completedImages;
},
addImages,
updateImage,
removeImage,
clearAll,
reprocessAll,
getImage,
setPipelineOverride,
reprocessImage
};
}
// Module-level singleton: all importers share one store instance.
export const imagesStore = createImagesStore();