Mirror of https://github.com/Stirling-Tools/Stirling-PDF.git (synced 2025-08-26 14:19:24 +00:00)
feat: Enhance file handling and processing capabilities
- Implement thumbnail caching in PageThumbnail component to improve performance.
- Update ConvertSettings to map selected files to their corresponding IDs in FileContext.
- Refactor FileContext to support quick deduplication using a new quickKey mechanism.
- Introduce addStoredFiles action to handle files with preserved IDs for better session management.
- Enhance FilesModalContext to support selection of stored files with metadata.
- Update useFileHandler to include logic for adding stored files.
- Modify useFileManager to support selection of stored files while maintaining backward compatibility.
- Improve file processing service with cancellation capabilities for ongoing operations.
- Centralize IndexedDB management with a new IndexedDBManager to streamline database interactions.
- Refactor file storage service to utilize the centralized IndexedDB manager for better database handling.
- Remove deprecated content hash logic and related fields from file types.
This commit is contained in:
parent 29a4e03784
commit f691e690e4
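
For orientation, the quickKey deduplication mentioned above keys a file on its metadata rather than its contents. A minimal sketch of the idea follows; the real createQuickKey and addFiles implementations appear in the diff below, and the helper names here are illustrative only.

// Sketch: metadata-based "soft" deduplication, assuming the
// name|size|lastModified key format described in the FileRecord type below.
const quickKeyOf = (file: File): string =>
  `${file.name}|${file.size}|${file.lastModified}`;

function dropDuplicates(incoming: File[], existingKeys: Set<string>): File[] {
  return incoming.filter(file => {
    const key = quickKeyOf(file);
    if (existingKeys.has(key)) return false; // same name/size/mtime already loaded
    existingKeys.add(key);
    return true;
  });
}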
@@ -16,7 +16,7 @@ interface FileManagerProps {
 }

 const FileManager: React.FC<FileManagerProps> = ({ selectedTool }) => {
-  const { isFilesModalOpen, closeFilesModal, onFilesSelect } = useFilesModalContext();
+  const { isFilesModalOpen, closeFilesModal, onFilesSelect, onStoredFilesSelect } = useFilesModalContext();
   const [recentFiles, setRecentFiles] = useState<FileMetadata[]>([]);
   const [isDragging, setIsDragging] = useState(false);
   const [isMobile, setIsMobile] = useState(false);
@@ -43,16 +43,19 @@ const FileManager: React.FC<FileManagerProps> = ({ selectedTool }) => {

   const handleFilesSelected = useCallback(async (files: FileMetadata[]) => {
     try {
-      const fileObjects = await Promise.all(
-        files.map(async (fileWithUrl) => {
-          return await convertToFile(fileWithUrl);
-        })
+      // NEW: Use stored files flow that preserves original IDs
+      const filesWithMetadata = await Promise.all(
+        files.map(async (metadata) => ({
+          file: await convertToFile(metadata),
+          originalId: metadata.id,
+          metadata
+        }))
       );
-      onFilesSelect(fileObjects);
+      onStoredFilesSelect(filesWithMetadata);
     } catch (error) {
       console.error('Failed to process selected files:', error);
     }
-  }, [convertToFile, onFilesSelect]);
+  }, [convertToFile, onStoredFilesSelect]);

   const handleNewFileUpload = useCallback(async (files: File[]) => {
     if (files.length > 0) {
@@ -7,7 +7,7 @@ import { Dropzone } from '@mantine/dropzone';
 import { useTranslation } from 'react-i18next';
 import UploadFileIcon from '@mui/icons-material/UploadFile';
 import { useToolFileSelection, useProcessedFiles, useFileState, useFileManagement, useFileActions } from '../../contexts/FileContext';
-import { FileOperation, createStableFileId } from '../../types/fileContext';
+import { FileOperation } from '../../types/fileContext';
 import { fileStorage } from '../../services/fileStorage';
 import { generateThumbnailForFile } from '../../utils/thumbnailUtils';
 import { zipFileService } from '../../services/zipFileService';
@@ -21,6 +21,7 @@ import { pdfExportService } from "../../services/pdfExportService";
 import { useThumbnailGeneration } from "../../hooks/useThumbnailGeneration";
 import { calculateScaleFromFileSize } from "../../utils/thumbnailUtils";
 import { fileStorage } from "../../services/fileStorage";
+import { indexedDBManager, DATABASE_CONFIGS } from "../../services/indexedDBManager";
 import './PageEditor.module.css';
 import PageThumbnail from './PageThumbnail';
 import BulkSelectionPanel from './BulkSelectionPanel';
@@ -184,7 +185,7 @@ const PageEditor = ({
       totalPages: pages.length,
       destroy: () => {} // Optional cleanup function
     };
-  }, [filesSignature, activeFileIds, primaryFileId, primaryFileRecord, processedFilePages, processedFileTotalPages, selectors, getThumbnailFromCache, addThumbnailToCache]);
+  }, [filesSignature, primaryFileId, primaryFileRecord]);

   // Display document: Use edited version if exists, otherwise original
@@ -308,23 +309,9 @@ const PageEditor = ({
           const pageId = `${primaryFileId}-page-${pageNumber}`;
           addThumbnailToCache(pageId, thumbnail);

-          // Also update the processedFile so document rebuilds include the thumbnail
-          const fileRecord = selectors.getFileRecord(primaryFileId);
-          if (fileRecord?.processedFile?.pages) {
-            const updatedProcessedFile = {
-              ...fileRecord.processedFile,
-              pages: fileRecord.processedFile.pages.map((page, index) =>
-                index + 1 === pageNumber
-                  ? { ...page, thumbnail }
-                  : page
-              )
-            };
-            actions.updateFileRecord(primaryFileId, { processedFile: updatedProcessedFile });
-          }
-
-          window.dispatchEvent(new CustomEvent('thumbnailReady', {
-            detail: { pageNumber, thumbnail, pageId }
-          }));
+          // Don't update context state - thumbnails stay in cache only
+          // This eliminates per-page context rerenders
+          // PageThumbnail will find thumbnails via cache polling
         });
       });
     }
@@ -334,7 +321,7 @@ const PageEditor = ({
     } catch (error) {
       console.error('PageEditor: Thumbnail generation failed:', error);
     }
-  }, [mergedPdfDocument, primaryFileId, activeFileIds, generateThumbnails, getThumbnailFromCache, addThumbnailToCache, selectors, actions]);
+  }, [mergedPdfDocument, primaryFileId, activeFileIds, generateThumbnails, getThumbnailFromCache, addThumbnailToCache, selectors]);

   // Simple useEffect - just generate missing thumbnails when document is ready
   useEffect(() => {
@ -563,7 +550,7 @@ const PageEditor = ({
|
||||
return updatedDoc;
|
||||
}, [actions, hasUnsavedDraft]);
|
||||
|
||||
// Enhanced draft save with proper IndexedDB handling
|
||||
// Enhanced draft save using centralized IndexedDB manager
|
||||
const saveDraftToIndexedDB = useCallback(async (doc: PDFDocument) => {
|
||||
const draftKey = `draft-${doc.id || 'merged'}`;
|
||||
const draftData = {
|
||||
@ -573,173 +560,44 @@ const PageEditor = ({
|
||||
};
|
||||
|
||||
try {
|
||||
// Save to 'pdf-drafts' store in IndexedDB
|
||||
const request = indexedDB.open('stirling-pdf-drafts', 1);
|
||||
request.onupgradeneeded = () => {
|
||||
const db = request.result;
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
db.createObjectStore('drafts');
|
||||
}
|
||||
};
|
||||
|
||||
request.onsuccess = () => {
|
||||
const db = request.result;
|
||||
// Check if the object store exists before trying to access it
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
console.warn('drafts object store does not exist, skipping auto-save');
|
||||
return;
|
||||
}
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
store.put(draftData, draftKey);
|
||||
// Use centralized IndexedDB manager
|
||||
const db = await indexedDBManager.openDatabase(DATABASE_CONFIGS.DRAFTS);
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
|
||||
const putRequest = store.put(draftData, draftKey);
|
||||
putRequest.onsuccess = () => {
|
||||
console.log('Draft auto-saved to IndexedDB');
|
||||
};
|
||||
putRequest.onerror = () => {
|
||||
console.warn('Failed to put draft data:', putRequest.error);
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.warn('Failed to auto-save draft:', error);
|
||||
|
||||
// Robust IndexedDB initialization with proper error handling
|
||||
const dbRequest = indexedDB.open('stirling-pdf-drafts', 1);
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
dbRequest.onerror = () => {
|
||||
console.warn('Failed to open draft database:', dbRequest.error);
|
||||
reject(dbRequest.error);
|
||||
};
|
||||
|
||||
dbRequest.onupgradeneeded = (event) => {
|
||||
const db = (event.target as IDBOpenDBRequest).result;
|
||||
|
||||
// Create object store if it doesn't exist
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
const store = db.createObjectStore('drafts');
|
||||
console.log('Created drafts object store');
|
||||
}
|
||||
};
|
||||
|
||||
dbRequest.onsuccess = () => {
|
||||
const db = dbRequest.result;
|
||||
|
||||
// Verify object store exists before attempting transaction
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
console.warn('Drafts object store not found, skipping save');
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
|
||||
transaction.onerror = () => {
|
||||
console.warn('Draft save transaction failed:', transaction.error);
|
||||
reject(transaction.error);
|
||||
};
|
||||
|
||||
transaction.oncomplete = () => {
|
||||
console.log('Draft auto-saved successfully');
|
||||
resolve();
|
||||
};
|
||||
|
||||
const putRequest = store.put(draftData, draftKey);
|
||||
putRequest.onerror = () => {
|
||||
console.warn('Failed to put draft data:', putRequest.error);
|
||||
reject(putRequest.error);
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.warn('Transaction creation failed:', error);
|
||||
reject(error);
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
}, [activeFileIds, selectors]);
|
||||
|
||||
// Enhanced draft cleanup with proper IndexedDB handling
|
||||
// Enhanced draft cleanup using centralized IndexedDB manager
|
||||
const cleanupDraft = useCallback(async () => {
|
||||
const draftKey = `draft-${mergedPdfDocument?.id || 'merged'}`;
|
||||
|
||||
try {
|
||||
const request = indexedDB.open('stirling-pdf-drafts', 1);
|
||||
|
||||
request.onupgradeneeded = () => {
|
||||
const db = request.result;
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
db.createObjectStore('drafts');
|
||||
}
|
||||
// Use centralized IndexedDB manager
|
||||
const db = await indexedDBManager.openDatabase(DATABASE_CONFIGS.DRAFTS);
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
|
||||
const deleteRequest = store.delete(draftKey);
|
||||
deleteRequest.onsuccess = () => {
|
||||
console.log('Draft cleaned up successfully');
|
||||
};
|
||||
|
||||
request.onsuccess = () => {
|
||||
const db = request.result;
|
||||
// Check if the object store exists before trying to access it
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
console.warn('drafts object store does not exist, skipping cleanup');
|
||||
return;
|
||||
}
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
store.delete(draftKey);
|
||||
deleteRequest.onerror = () => {
|
||||
console.warn('Failed to delete draft:', deleteRequest.error);
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.warn('Failed to cleanup draft:', error);
|
||||
const dbRequest = indexedDB.open('stirling-pdf-drafts', 1);
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
dbRequest.onerror = () => {
|
||||
console.warn('Failed to open draft database for cleanup:', dbRequest.error);
|
||||
resolve(); // Don't fail the whole operation if cleanup fails
|
||||
};
|
||||
|
||||
dbRequest.onupgradeneeded = (event) => {
|
||||
const db = (event.target as IDBOpenDBRequest).result;
|
||||
|
||||
// Create object store if it doesn't exist
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
db.createObjectStore('drafts');
|
||||
console.log('Created drafts object store during cleanup fallback');
|
||||
}
|
||||
};
|
||||
|
||||
dbRequest.onsuccess = () => {
|
||||
const db = dbRequest.result;
|
||||
|
||||
// Check if object store exists before attempting cleanup
|
||||
if (!db.objectStoreNames.contains('drafts')) {
|
||||
console.log('No drafts object store found, nothing to cleanup');
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const transaction = db.transaction('drafts', 'readwrite');
|
||||
const store = transaction.objectStore('drafts');
|
||||
|
||||
transaction.onerror = () => {
|
||||
console.warn('Draft cleanup transaction failed:', transaction.error);
|
||||
resolve(); // Don't fail if cleanup fails
|
||||
};
|
||||
|
||||
transaction.oncomplete = () => {
|
||||
console.log('Draft cleaned up successfully');
|
||||
resolve();
|
||||
};
|
||||
|
||||
const deleteRequest = store.delete(draftKey);
|
||||
deleteRequest.onerror = () => {
|
||||
console.warn('Failed to delete draft:', deleteRequest.error);
|
||||
resolve(); // Don't fail if delete fails
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.warn('Draft cleanup transaction creation failed:', error);
|
||||
resolve(); // Don't fail if cleanup fails
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
}, [mergedPdfDocument]);
|
||||
|
||||
@ -1145,36 +1003,36 @@ const PageEditor = ({
|
||||
}
|
||||
}, [editedDocument, applyChanges, handleExport]);
|
||||
|
||||
// Enhanced draft checking with proper IndexedDB handling
|
||||
// Enhanced draft checking using centralized IndexedDB manager
|
||||
const checkForDrafts = useCallback(async () => {
|
||||
if (!mergedPdfDocument) return;
|
||||
|
||||
try {
|
||||
const draftKey = `draft-${mergedPdfDocument.id || 'merged'}`;
|
||||
const request = indexedDB.open('stirling-pdf-drafts', 1);
|
||||
// Use centralized IndexedDB manager
|
||||
const db = await indexedDBManager.openDatabase(DATABASE_CONFIGS.DRAFTS);
|
||||
const transaction = db.transaction('drafts', 'readonly');
|
||||
const store = transaction.objectStore('drafts');
|
||||
const getRequest = store.get(draftKey);
|
||||
|
||||
request.onsuccess = () => {
|
||||
const db = request.result;
|
||||
if (!db.objectStoreNames.contains('drafts')) return;
|
||||
getRequest.onsuccess = () => {
|
||||
const draft = getRequest.result;
|
||||
if (draft && draft.timestamp) {
|
||||
// Check if draft is recent (within last 24 hours)
|
||||
const draftAge = Date.now() - draft.timestamp;
|
||||
const twentyFourHours = 24 * 60 * 60 * 1000;
|
||||
|
||||
const transaction = db.transaction('drafts', 'readonly');
|
||||
const store = transaction.objectStore('drafts');
|
||||
const getRequest = store.get(draftKey);
|
||||
|
||||
getRequest.onsuccess = () => {
|
||||
const draft = getRequest.result;
|
||||
if (draft && draft.timestamp) {
|
||||
// Check if draft is recent (within last 24 hours)
|
||||
const draftAge = Date.now() - draft.timestamp;
|
||||
const twentyFourHours = 24 * 60 * 60 * 1000;
|
||||
|
||||
if (draftAge < twentyFourHours) {
|
||||
setFoundDraft(draft);
|
||||
setShowResumeModal(true);
|
||||
}
|
||||
if (draftAge < twentyFourHours) {
|
||||
setFoundDraft(draft);
|
||||
setShowResumeModal(true);
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
getRequest.onerror = () => {
|
||||
console.warn('Failed to get draft:', getRequest.error);
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
console.warn('Draft check failed:', error);
|
||||
// Don't throw - draft checking failure shouldn't break the app
|
||||
|
@@ -11,6 +11,7 @@ import { PDFPage, PDFDocument } from '../../types/pageEditor';
 import { RotatePagesCommand, DeletePagesCommand, ToggleSplitCommand } from '../../commands/pageCommands';
 import { Command } from '../../hooks/useUndoRedo';
 import { useFileState } from '../../contexts/FileContext';
+import { useThumbnailGeneration } from '../../hooks/useThumbnailGeneration';
 import styles from './PageEditor.module.css';
 import { getDocument, GlobalWorkerOptions } from 'pdfjs-dist';

@@ -80,6 +81,7 @@ const PageThumbnail = React.memo(({
 }: PageThumbnailProps) => {
   const [thumbnailUrl, setThumbnailUrl] = useState<string | null>(page.thumbnail);
   const { state, selectors } = useFileState();
+  const { getThumbnailFromCache } = useThumbnailGeneration();

   // Update thumbnail URL when page prop changes - prevent redundant updates
   useEffect(() => {
@@ -95,24 +97,19 @@ const PageThumbnail = React.memo(({
       return; // Skip if we already have a thumbnail
     }

-    const handleThumbnailReady = (event: CustomEvent) => {
-      const { pageNumber, thumbnail, pageId } = event.detail;
-
-      // Guard: check if this component is still mounted and page still exists
-      if (pageNumber === page.pageNumber && pageId === page.id) {
-        // Additional safety: check if the file still exists in FileContext
-        const fileId = page.id.split('-page-')[0]; // Extract fileId from pageId
-        const fileExists = selectors.getAllFileIds().includes(fileId);
-
-        if (fileExists) {
-          setThumbnailUrl(thumbnail);
-        }
+    // Poll for thumbnail in cache (lightweight polling every 500ms)
+    const pollInterval = setInterval(() => {
+      // Check if thumbnail is now available in cache
+      const cachedThumbnail = getThumbnailFromCache(page.id);
+      if (cachedThumbnail) {
+        setThumbnailUrl(cachedThumbnail);
+        clearInterval(pollInterval); // Stop polling once found
       }
-    };
+    }, 500);

-    window.addEventListener('thumbnailReady', handleThumbnailReady as EventListener);
+    // Cleanup interval
     return () => {
-      window.removeEventListener('thumbnailReady', handleThumbnailReady as EventListener);
+      clearInterval(pollInterval);
     };
   }, [page.pageNumber, page.id]); // Remove thumbnailUrl dependency to stabilize effect

@@ -6,7 +6,6 @@ import { useMultipleEndpointsEnabled } from "../../../hooks/useEndpointConfig";
 import { isImageFormat, isWebFormat } from "../../../utils/convertUtils";
 import { useToolFileSelection } from "../../../contexts/FileContext";
 import { useFileState } from "../../../contexts/FileContext";
-import { createStableFileId } from "../../../types/fileContext";
 import { detectFileExtension } from "../../../utils/fileUtils";
 import GroupedFormatDropdown from "./GroupedFormatDropdown";
 import ConvertToImageSettings from "./ConvertToImageSettings";
@@ -151,7 +150,21 @@ const ConvertSettings = ({
   };

   const updateFileSelection = (files: File[]) => {
-    setSelectedFiles(files.map(f => createStableFileId(f)));
+    // Map File objects to their actual IDs in FileContext
+    const fileIds = files.map(file => {
+      // Find the file ID by matching file properties
+      const fileRecord = state.files.ids
+        .map(id => selectors.getFileRecord(id))
+        .find(record =>
+          record &&
+          record.name === file.name &&
+          record.size === file.size &&
+          record.lastModified === file.lastModified
+        );
+      return fileRecord?.id;
+    }).filter((id): id is string => id !== undefined); // Type guard to ensure only strings
+
+    setSelectedFiles(fileIds);
   };

   const handleFromExtensionChange = (value: string) => {
@@ -37,8 +37,9 @@ import {
   toFileRecord,
   revokeFileResources,
   createFileId,
-  computeContentHash
+  createQuickKey
 } from '../types/fileContext';
+import { FileMetadata } from '../types/file';

 // Import real services
 import { EnhancedPDFProcessingService } from '../services/enhancedPDFProcessingService';
@@ -124,7 +125,7 @@ function fileContextReducer(state: FileContextState, action: FileContextAction):
       const existingRecord = state.files.byId[id];
       if (!existingRecord) return state;

-      // Immutable merge supports all FileRecord fields including contentHash, hashStatus
+      // Immutable merge supports all FileRecord fields
       return {
         ...state,
         files: {
@@ -374,8 +375,12 @@ export function FileContextProvider({
     });
     blobUrls.current.clear();

-    // Clear all processing
-    enhancedPDFProcessingService.clearAllProcessing();
+    // Clear all processing and cache
+    enhancedPDFProcessingService.clearAll();
+
+    // Cancel and clear centralized file processing
+    fileProcessingService.cancelAllProcessing();
+    fileProcessingService.clearCache();

     // Destroy thumbnails
     thumbnailGenerationService.destroy();
@@ -414,24 +419,40 @@ export function FileContextProvider({

   // Action implementations
   const addFiles = useCallback(async (files: File[]): Promise<File[]> => {
-    // Generate UUID-based IDs and create records
+    // Three-tier deduplication: UUID (primary key) + quickKey (soft dedupe) + contentHash (hard dedupe)
     const fileRecords: FileRecord[] = [];
     const addedFiles: File[] = [];

+    // Build quickKey lookup from existing files for deduplication
+    const existingQuickKeys = new Set<string>();
+    Object.values(stateRef.current.files.byId).forEach(record => {
+      existingQuickKeys.add(record.quickKey);
+    });
+
     for (const file of files) {
+      const quickKey = createQuickKey(file);
+
+      // Soft deduplication: Check if file already exists by metadata
+      if (existingQuickKeys.has(quickKey)) {
+        console.log(`📄 Skipping duplicate file: ${file.name} (already exists)`);
+        continue; // Skip duplicate file
+      }
+
       const fileId = createFileId(); // UUID-based, zero collisions

       // Store File in ref map
       filesRef.current.set(fileId, file);

-      // Create record with pending hash status
+      // Create record
       const record = toFileRecord(file, fileId);
-      record.hashStatus = 'pending';
+
+      // Add to deduplication tracking
+      existingQuickKeys.add(quickKey);

       fileRecords.push(record);
       addedFiles.push(file);

-      // Start centralized file processing (async, non-blocking)
+      // Start centralized file processing (async, non-blocking) - SINGLE CALL
       fileProcessingService.processFile(file, fileId).then(result => {
         // Only update if file still exists in context
         if (filesRef.current.has(fileId)) {
@@ -448,6 +469,20 @@ export function FileContextProvider({
             }
           });
           console.log(`✅ File processing complete for ${file.name}: ${result.metadata.totalPages} pages`);
+
+          // Optional: Persist to IndexedDB if enabled (reuse the same result)
+          if (enablePersistence) {
+            try {
+              const thumbnail = result.metadata.thumbnailUrl;
+              fileStorage.storeFile(file, fileId, thumbnail).then(() => {
+                console.log('File persisted to IndexedDB:', fileId);
+              }).catch(error => {
+                console.warn('Failed to persist file to IndexedDB:', error);
+              });
+            } catch (error) {
+              console.warn('Failed to initiate file persistence:', error);
+            }
+          }
         } else {
           console.warn(`❌ File processing failed for ${file.name}:`, result.error);
         }
@@ -456,38 +491,6 @@ export function FileContextProvider({
         console.error(`❌ File processing error for ${file.name}:`, error);
       });

-      // Optional: Persist to IndexedDB if enabled
-      if (enablePersistence) {
-        try {
-          // Use the thumbnail from processing service if available
-          fileProcessingService.processFile(file, fileId).then(result => {
-            const thumbnail = result.metadata?.thumbnailUrl;
-            return fileStorage.storeFile(file, fileId, thumbnail);
-          }).then(() => {
-            console.log('File persisted to IndexedDB:', fileId);
-          }).catch(error => {
-            console.warn('Failed to persist file to IndexedDB:', error);
-          });
-        } catch (error) {
-          console.warn('Failed to initiate file persistence:', error);
-        }
-      }
-
-      // Start async content hashing (don't block add operation)
-      computeContentHash(file).then(contentHash => {
-        // Only update if file still exists in context
-        if (filesRef.current.has(fileId)) {
-          updateFileRecord(fileId, {
-            contentHash: contentHash || undefined, // Convert null to undefined
-            hashStatus: contentHash ? 'completed' : 'failed'
-          });
-        }
-      }).catch(() => {
-        // Hash failed, update status if file still exists
-        if (filesRef.current.has(fileId)) {
-          updateFileRecord(fileId, { hashStatus: 'failed' });
-        }
-      });
     }

     // Only dispatch if we have new files
@@ -499,7 +502,79 @@ export function FileContextProvider({
     return addedFiles;
   }, [enablePersistence]); // Remove updateFileRecord dependency

+  // NEW: Add stored files with preserved IDs to prevent duplicates across sessions
+  const addStoredFiles = useCallback(async (filesWithMetadata: Array<{ file: File; originalId: FileId; metadata: FileMetadata }>): Promise<File[]> => {
+    const fileRecords: FileRecord[] = [];
+    const addedFiles: File[] = [];
+
+    for (const { file, originalId, metadata } of filesWithMetadata) {
+      // Skip if file already exists with same ID (exact match)
+      if (stateRef.current.files.byId[originalId]) {
+        console.log(`📄 Skipping stored file: ${file.name} (already loaded with same ID)`);
+        continue;
+      }
+
+      // Store File in ref map with preserved ID
+      filesRef.current.set(originalId, file);
+
+      // Create record with preserved ID and stored metadata
+      const record: FileRecord = {
+        id: originalId, // Preserve original UUID from storage
+        name: file.name,
+        size: file.size,
+        type: file.type,
+        lastModified: file.lastModified,
+        quickKey: createQuickKey(file),
+        thumbnailUrl: metadata.thumbnail,
+        createdAt: Date.now(),
+        // Skip processedFile for now - it will be populated by background processing if needed
+      };
+
+      fileRecords.push(record);
+      addedFiles.push(file);
+
+      // Background processing with preserved ID (async, non-blocking)
+      fileProcessingService.processFile(file, originalId).then(result => {
+        // Only update if file still exists in context
+        if (filesRef.current.has(originalId)) {
+          if (result.success && result.metadata) {
+            // Update with processed metadata using dispatch directly
+            dispatch({
+              type: 'UPDATE_FILE_RECORD',
+              payload: {
+                id: originalId,
+                updates: {
+                  processedFile: result.metadata,
+                  // Keep existing thumbnail if available, otherwise use processed one
+                  thumbnailUrl: metadata.thumbnail || result.metadata.thumbnailUrl
+                }
+              }
+            });
+            console.log(`✅ Stored file processing complete for ${file.name}: ${result.metadata.totalPages} pages`);
+          } else {
+            console.warn(`❌ Stored file processing failed for ${file.name}:`, result.error);
+          }
+        }
+      }).catch(error => {
+        console.error(`❌ Stored file processing error for ${file.name}:`, error);
+      });
+    }
+
+    // Only dispatch if we have new files
+    if (fileRecords.length > 0) {
+      dispatch({ type: 'ADD_FILES', payload: { fileRecords } });
+    }
+
+    console.log(`📁 Added ${fileRecords.length} stored files with preserved IDs`);
+    return addedFiles;
+  }, []);
+
   const removeFiles = useCallback((fileIds: FileId[], deleteFromStorage: boolean = true) => {
+    // Cancel any ongoing processing for removed files
+    fileIds.forEach(fileId => {
+      fileProcessingService.cancelProcessing(fileId);
+    });
+
     // Clean up Files from ref map first
     fileIds.forEach(fileId => {
       filesRef.current.delete(fileId);
@@ -560,6 +635,7 @@ export function FileContextProvider({
   // Memoized actions to prevent re-renders
   const actions = useMemo<FileContextActions>(() => ({
     addFiles,
+    addStoredFiles,
     removeFiles,
     updateFileRecord,
     clearAllFiles: () => {
@@ -582,7 +658,7 @@ export function FileContextProvider({
     setMode: (mode: ModeType) => dispatch({ type: 'SET_CURRENT_MODE', payload: mode }),
     confirmNavigation,
     cancelNavigation
-  }), [addFiles, removeFiles, cleanupAllFiles, setHasUnsavedChanges, confirmNavigation, cancelNavigation]);
+  }), [addFiles, addStoredFiles, removeFiles, cleanupAllFiles, setHasUnsavedChanges, confirmNavigation, cancelNavigation]);

   // Split context values to minimize re-renders
   const stateValue = useMemo<FileContextStateValue>(() => ({
@@ -601,6 +677,7 @@ export function FileContextProvider({
     ...state.ui,
     // Action compatibility layer
     addFiles,
+    addStoredFiles,
     removeFiles,
     updateFileRecord,
     clearAllFiles: actions.clearAllFiles,
@@ -624,7 +701,7 @@ export function FileContextProvider({
     get activeFiles() { return selectors.getFiles(); }, // Getter to avoid creating new arrays on every render
     // Selectors
     ...selectors
-  }), [state, actions, addFiles, removeFiles, updateFileRecord, setHasUnsavedChanges, requestNavigation, confirmNavigation, cancelNavigation, trackBlobUrl, trackPdfDocument, cleanupFile, scheduleCleanup]); // Removed selectors dependency
+  }), [state, actions, addFiles, addStoredFiles, removeFiles, updateFileRecord, setHasUnsavedChanges, requestNavigation, confirmNavigation, cancelNavigation, trackBlobUrl, trackPdfDocument, cleanupFile, scheduleCleanup]); // Removed selectors dependency

   // Cleanup on unmount
   useEffect(() => {
@@ -1,5 +1,6 @@
 import React, { createContext, useContext, useState, useCallback, useMemo } from 'react';
 import { useFileHandler } from '../hooks/useFileHandler';
+import { FileMetadata } from '../types/file';

 interface FilesModalContextType {
   isFilesModalOpen: boolean;
@@ -7,6 +8,7 @@ interface FilesModalContextType {
   closeFilesModal: () => void;
   onFileSelect: (file: File) => void;
   onFilesSelect: (files: File[]) => void;
+  onStoredFilesSelect: (filesWithMetadata: Array<{ file: File; originalId: string; metadata: FileMetadata }>) => void;
   onModalClose?: () => void;
   setOnModalClose: (callback: () => void) => void;
 }
@@ -14,7 +16,7 @@ interface FilesModalContextType {
 const FilesModalContext = createContext<FilesModalContextType | null>(null);

 export const FilesModalProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
-  const { addToActiveFiles, addMultipleFiles } = useFileHandler();
+  const { addToActiveFiles, addMultipleFiles, addStoredFiles } = useFileHandler();
   const [isFilesModalOpen, setIsFilesModalOpen] = useState(false);
   const [onModalClose, setOnModalClose] = useState<(() => void) | undefined>();

@@ -37,6 +39,11 @@ export const FilesModalProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
     closeFilesModal();
   }, [addMultipleFiles, closeFilesModal]);

+  const handleStoredFilesSelect = useCallback((filesWithMetadata: Array<{ file: File; originalId: string; metadata: FileMetadata }>) => {
+    addStoredFiles(filesWithMetadata);
+    closeFilesModal();
+  }, [addStoredFiles, closeFilesModal]);
+
   const setModalCloseCallback = useCallback((callback: () => void) => {
     setOnModalClose(() => callback);
   }, []);
@@ -47,6 +54,7 @@ export const FilesModalProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
     closeFilesModal,
     onFileSelect: handleFileSelect,
     onFilesSelect: handleFilesSelect,
+    onStoredFilesSelect: handleStoredFilesSelect,
     onModalClose,
     setOnModalClose: setModalCloseCallback,
   }), [
@@ -55,6 +63,7 @@ export const FilesModalProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
     closeFilesModal,
     handleFileSelect,
     handleFilesSelect,
+    handleStoredFilesSelect,
     onModalClose,
     setModalCloseCallback,
   ]);
@@ -1,35 +1,38 @@
 import { useCallback } from 'react';
 import { useFileState, useFileActions } from '../contexts/FileContext';
-import { createStableFileId } from '../types/fileContext';
+import { FileMetadata } from '../types/file';

 export const useFileHandler = () => {
-  const { state } = useFileState();
+  const { state } = useFileState(); // Still needed for addStoredFiles
   const { actions } = useFileActions();

   const addToActiveFiles = useCallback(async (file: File) => {
-    // Use stable ID function for consistent deduplication
-    const stableId = createStableFileId(file);
-    const exists = state.files.byId[stableId] !== undefined;
-
-    if (!exists) {
-      await actions.addFiles([file]);
-    }
-  }, [state.files.byId, actions.addFiles]);
+    // Let FileContext handle deduplication with quickKey logic
+    await actions.addFiles([file]);
+  }, [actions.addFiles]);

   const addMultipleFiles = useCallback(async (files: File[]) => {
-    // Filter out files that already exist using stable IDs
-    const newFiles = files.filter(file => {
-      const stableId = createStableFileId(file);
-      return state.files.byId[stableId] === undefined;
+    // Let FileContext handle deduplication with quickKey logic
+    await actions.addFiles(files);
+  }, [actions.addFiles]);
+
+  // NEW: Add stored files preserving their original IDs to prevent session duplicates
+  const addStoredFiles = useCallback(async (filesWithMetadata: Array<{ file: File; originalId: string; metadata: FileMetadata }>) => {
+    // Filter out files that already exist with the same ID (exact match)
+    const newFiles = filesWithMetadata.filter(({ originalId }) => {
+      return state.files.byId[originalId] === undefined;
     });

     if (newFiles.length > 0) {
-      await actions.addFiles(newFiles);
+      await actions.addStoredFiles(newFiles);
     }
-  }, [state.files.byId, actions.addFiles]);

+    console.log(`📁 Added ${newFiles.length} stored files (${filesWithMetadata.length - newFiles.length} skipped as duplicates)`);
+  }, [state.files.byId, actions.addStoredFiles]);

   return {
     addToActiveFiles,
     addMultipleFiles,
+    addStoredFiles,
   };
 };
@@ -95,15 +95,30 @@ export const useFileManager = () => {
     setSelectedFiles([]);
   };

-  const selectMultipleFiles = async (files: FileMetadata[], onFilesSelect: (files: File[]) => void) => {
+  const selectMultipleFiles = async (files: FileMetadata[], onFilesSelect: (files: File[]) => void, onStoredFilesSelect?: (filesWithMetadata: Array<{ file: File; originalId: string; metadata: FileMetadata }>) => void) => {
     if (selectedFiles.length === 0) return;

     try {
-      // Filter by UUID and convert to File objects
       const selectedFileObjects = files.filter(f => selectedFiles.includes(f.id));
-      const filePromises = selectedFileObjects.map(convertToFile);
-      const convertedFiles = await Promise.all(filePromises);
-      onFilesSelect(convertedFiles); // FileContext will assign new UUIDs

+      if (onStoredFilesSelect) {
+        // NEW: Use stored files flow that preserves IDs
+        const filesWithMetadata = await Promise.all(
+          selectedFileObjects.map(async (metadata) => ({
+            file: await convertToFile(metadata),
+            originalId: metadata.id,
+            metadata
+          }))
+        );
+        onStoredFilesSelect(filesWithMetadata);
+      } else {
+        // LEGACY: Old flow that generates new UUIDs (for backward compatibility)
+        const filePromises = selectedFileObjects.map(convertToFile);
+        const convertedFiles = await Promise.all(filePromises);
+        onFilesSelect(convertedFiles); // FileContext will assign new UUIDs
+      }

       clearSelection();
     } catch (error) {
       console.error('Failed to load selected files:', error);
@@ -25,8 +25,13 @@ export interface FileProcessingResult {
   error?: string;
 }

+interface ProcessingOperation {
+  promise: Promise<FileProcessingResult>;
+  abortController: AbortController;
+}
+
 class FileProcessingService {
-  private processingCache = new Map<string, Promise<FileProcessingResult>>();
+  private processingCache = new Map<string, ProcessingOperation>();

   /**
    * Process a file to extract metadata, page count, and generate thumbnails
@@ -34,15 +39,24 @@ class FileProcessingService {
    */
   async processFile(file: File, fileId: string): Promise<FileProcessingResult> {
     // Check if we're already processing this file
-    const existingPromise = this.processingCache.get(fileId);
-    if (existingPromise) {
+    const existingOperation = this.processingCache.get(fileId);
+    if (existingOperation) {
       console.log(`📁 FileProcessingService: Using cached processing for ${file.name}`);
-      return existingPromise;
+      return existingOperation.promise;
     }

+    // Create abort controller for this operation
+    const abortController = new AbortController();
+
     // Create processing promise
-    const processingPromise = this.performProcessing(file, fileId);
-    this.processingCache.set(fileId, processingPromise);
+    const processingPromise = this.performProcessing(file, fileId, abortController);
+
+    // Store operation with abort controller
+    const operation: ProcessingOperation = {
+      promise: processingPromise,
+      abortController
+    };
+    this.processingCache.set(fileId, operation);

     // Clean up cache after completion
     processingPromise.finally(() => {
@@ -52,18 +66,30 @@ class FileProcessingService {
     return processingPromise;
   }

-  private async performProcessing(file: File, fileId: string): Promise<FileProcessingResult> {
+  private async performProcessing(file: File, fileId: string, abortController: AbortController): Promise<FileProcessingResult> {
     console.log(`📁 FileProcessingService: Starting processing for ${file.name} (${fileId})`);

     try {
+      // Check for cancellation at start
+      if (abortController.signal.aborted) {
+        throw new Error('Processing cancelled');
+      }
+
       let totalPages = 1;
       let thumbnailUrl: string | undefined;

       // Handle PDF files
       if (file.type === 'application/pdf') {
+        // Read arrayBuffer once and reuse for both PDF.js and fallback
+        const arrayBuffer = await file.arrayBuffer();
+
+        // Check for cancellation after async operation
+        if (abortController.signal.aborted) {
+          throw new Error('Processing cancelled');
+        }
+
         // Discover page count using PDF.js (most accurate)
         try {
-          const arrayBuffer = await file.arrayBuffer();
           const pdfDoc = await getDocument({
             data: arrayBuffer,
             disableAutoFetch: true,
@@ -75,12 +101,16 @@ class FileProcessingService {

           // Clean up immediately
           pdfDoc.destroy();

+          // Check for cancellation after PDF.js processing
+          if (abortController.signal.aborted) {
+            throw new Error('Processing cancelled');
+          }
         } catch (pdfError) {
           console.warn(`📁 FileProcessingService: PDF.js failed for ${file.name}, trying fallback:`, pdfError);

-          // Fallback to text analysis
+          // Fallback to text analysis (reuse same arrayBuffer)
           try {
-            const arrayBuffer = await file.arrayBuffer();
             const text = new TextDecoder('latin1').decode(arrayBuffer);
             const pageMatches = text.match(/\/Type\s*\/Page[^s]/g);
             totalPages = pageMatches ? pageMatches.length : 1;
@@ -96,6 +126,11 @@ class FileProcessingService {
       try {
         thumbnailUrl = await generateThumbnailForFile(file);
         console.log(`📁 FileProcessingService: Generated thumbnail for ${file.name}`);

+        // Check for cancellation after thumbnail generation
+        if (abortController.signal.aborted) {
+          throw new Error('Processing cancelled');
+        }
       } catch (thumbError) {
         console.warn(`📁 FileProcessingService: Thumbnail generation failed for ${file.name}:`, thumbError);
       }
@@ -145,6 +180,30 @@ class FileProcessingService {
   isProcessing(fileId: string): boolean {
     return this.processingCache.has(fileId);
   }

+  /**
+   * Cancel processing for a specific file
+   */
+  cancelProcessing(fileId: string): boolean {
+    const operation = this.processingCache.get(fileId);
+    if (operation) {
+      operation.abortController.abort();
+      console.log(`📁 FileProcessingService: Cancelled processing for ${fileId}`);
+      return true;
+    }
+    return false;
+  }
+
+  /**
+   * Cancel all ongoing processing operations
+   */
+  cancelAllProcessing(): void {
+    this.processingCache.forEach((operation, fileId) => {
+      operation.abortController.abort();
+      console.log(`📁 FileProcessingService: Cancelled processing for ${fileId}`);
+    });
+    console.log(`📁 FileProcessingService: Cancelled ${this.processingCache.size} processing operations`);
+  }
 }

 // Export singleton instance
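A caller-side sketch of the cancellation API added above (hedged; it mirrors how removeFiles in FileContext calls cancelProcessing in this same diff, and the handler name here is illustrative):

// Sketch: start processing, then abort it if the file is removed first.
void fileProcessingService.processFile(file, fileId);

function onFileRemoved(removedId: string) {
  // performProcessing checks abortController.signal.aborted at each stage
  // and bails out with 'Processing cancelled' once this fires.
  fileProcessingService.cancelProcessing(removedId);
}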
@@ -1,8 +1,11 @@
 /**
  * IndexedDB File Storage Service
  * Provides high-capacity file storage for PDF processing
+ * Now uses centralized IndexedDB manager
  */

+import { indexedDBManager, DATABASE_CONFIGS } from './indexedDBManager';
+
 export interface StoredFile {
   id: string;
   name: string;
@@ -22,69 +25,21 @@ export interface StorageStats {
 }

 class FileStorageService {
-  private dbName = 'stirling-pdf-files';
-  private dbVersion = 2; // Increment version to force schema update
-  private storeName = 'files';
-  private db: IDBDatabase | null = null;
-  private initPromise: Promise<void> | null = null;
+  private readonly dbConfig = DATABASE_CONFIGS.FILES;
+  private readonly storeName = 'files';

   /**
-   * Initialize the IndexedDB database (singleton pattern)
+   * Get database connection using centralized manager
    */
-  async init(): Promise<void> {
-    if (this.db) {
-      return Promise.resolve();
-    }
-
-    if (this.initPromise) {
-      return this.initPromise;
-    }
-
-    this.initPromise = new Promise((resolve, reject) => {
-      const request = indexedDB.open(this.dbName, this.dbVersion);
-
-      request.onerror = () => {
-        this.initPromise = null;
-        reject(request.error);
-      };
-
-      request.onsuccess = () => {
-        this.db = request.result;
-        console.log('IndexedDB connection established');
-        resolve();
-      };
-
-      request.onupgradeneeded = (event) => {
-        const db = (event.target as IDBOpenDBRequest).result;
-        const oldVersion = (event as any).oldVersion;
-
-        console.log('IndexedDB upgrade needed from version', oldVersion, 'to', this.dbVersion);
-
-        // Only recreate object store if it doesn't exist or if upgrading from version < 2
-        if (!db.objectStoreNames.contains(this.storeName)) {
-          const store = db.createObjectStore(this.storeName, { keyPath: 'id' });
-          store.createIndex('name', 'name', { unique: false });
-          store.createIndex('lastModified', 'lastModified', { unique: false });
-          console.log('IndexedDB object store created with keyPath: id');
-        } else if (oldVersion < 2) {
-          // Only delete and recreate if upgrading from version 1 to 2
-          db.deleteObjectStore(this.storeName);
-          const store = db.createObjectStore(this.storeName, { keyPath: 'id' });
-          store.createIndex('name', 'name', { unique: false });
-          store.createIndex('lastModified', 'lastModified', { unique: false });
-          console.log('IndexedDB object store recreated with keyPath: id (version upgrade)');
-        }
-      };
-    });
-
-    return this.initPromise;
+  private async getDatabase(): Promise<IDBDatabase> {
+    return indexedDBManager.openDatabase(this.dbConfig);
   }

   /**
    * Store a file in IndexedDB with external UUID
    */
   async storeFile(file: File, fileId: string, thumbnail?: string): Promise<StoredFile> {
-    if (!this.db) await this.init();
+    const db = await this.getDatabase();

     const arrayBuffer = await file.arrayBuffer();

@ -100,7 +55,7 @@ class FileStorageService {
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readwrite');
|
||||
const transaction = db.transaction([this.storeName], 'readwrite');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
|
||||
// Debug logging
|
||||
@ -134,10 +89,10 @@ class FileStorageService {
|
||||
* Retrieve a file from IndexedDB
|
||||
*/
|
||||
async getFile(id: string): Promise<StoredFile | null> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readonly');
|
||||
const transaction = db.transaction([this.storeName], 'readonly');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.get(id);
|
||||
|
||||
@ -150,10 +105,10 @@ class FileStorageService {
|
||||
* Get all stored files (WARNING: loads all data into memory)
|
||||
*/
|
||||
async getAllFiles(): Promise<StoredFile[]> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readonly');
|
||||
const transaction = db.transaction([this.storeName], 'readonly');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.getAll();
|
||||
|
||||
@ -175,10 +130,10 @@ class FileStorageService {
|
||||
* Get metadata of all stored files (without loading data into memory)
|
||||
*/
|
||||
async getAllFileMetadata(): Promise<Omit<StoredFile, 'data'>[]> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readonly');
|
||||
const transaction = db.transaction([this.storeName], 'readonly');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.openCursor();
|
||||
const files: Omit<StoredFile, 'data'>[] = [];
|
||||
@ -212,10 +167,10 @@ class FileStorageService {
|
||||
* Delete a file from IndexedDB
|
||||
*/
|
||||
async deleteFile(id: string): Promise<void> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readwrite');
|
||||
const transaction = db.transaction([this.storeName], 'readwrite');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.delete(id);
|
||||
|
||||
@ -228,9 +183,9 @@ class FileStorageService {
|
||||
* Update the lastModified timestamp of a file (for most recently used sorting)
|
||||
*/
|
||||
async touchFile(id: string): Promise<boolean> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readwrite');
|
||||
const transaction = db.transaction([this.storeName], 'readwrite');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
|
||||
const getRequest = store.get(id);
|
||||
@ -254,10 +209,10 @@ class FileStorageService {
|
||||
* Clear all stored files
|
||||
*/
|
||||
async clearAll(): Promise<void> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readwrite');
|
||||
const transaction = db.transaction([this.storeName], 'readwrite');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.clear();
|
||||
|
||||
@ -270,8 +225,6 @@ class FileStorageService {
|
||||
* Get storage statistics (only our IndexedDB usage)
|
||||
*/
|
||||
async getStorageStats(): Promise<StorageStats> {
|
||||
if (!this.db) await this.init();
|
||||
|
||||
let used = 0;
|
||||
let available = 0;
|
||||
let quota: number | undefined;
|
||||
@ -314,10 +267,10 @@ class FileStorageService {
|
||||
* Get file count quickly without loading metadata
|
||||
*/
|
||||
async getFileCount(): Promise<number> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readonly');
|
||||
const transaction = db.transaction([this.storeName], 'readonly');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const request = store.count();
|
||||
|
||||
@ -364,9 +317,9 @@ class FileStorageService {
|
||||
// Also check our specific database with different versions
|
||||
for (let version = 1; version <= 3; version++) {
|
||||
try {
|
||||
console.log(`Trying to open ${this.dbName} version ${version}...`);
|
||||
console.log(`Trying to open ${this.dbConfig.name} version ${version}...`);
|
||||
const db = await new Promise<IDBDatabase>((resolve, reject) => {
|
||||
const request = indexedDB.open(this.dbName, version);
|
||||
const request = indexedDB.open(this.dbConfig.name, version);
|
||||
request.onsuccess = () => resolve(request.result);
|
||||
request.onerror = () => reject(request.error);
|
||||
request.onupgradeneeded = () => {
|
||||
@ -399,10 +352,10 @@ class FileStorageService {
|
||||
* Debug method to check what's actually in the database
|
||||
*/
|
||||
async debugDatabaseContents(): Promise<void> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readonly');
|
||||
const transaction = db.transaction([this.storeName], 'readonly');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
|
||||
// First try getAll to see if there's anything
|
||||
@ -526,11 +479,11 @@ class FileStorageService {
|
||||
* Update thumbnail for an existing file
|
||||
*/
|
||||
async updateThumbnail(id: string, thumbnail: string): Promise<boolean> {
|
||||
if (!this.db) await this.init();
|
||||
const db = await this.getDatabase();
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
try {
|
||||
const transaction = this.db!.transaction([this.storeName], 'readwrite');
|
||||
const transaction = db.transaction([this.storeName], 'readwrite');
|
||||
const store = transaction.objectStore(this.storeName);
|
||||
const getRequest = store.get(id);
|
||||
|
||||
|
227
frontend/src/services/indexedDBManager.ts
Normal file
227
frontend/src/services/indexedDBManager.ts
Normal file
@ -0,0 +1,227 @@
|
||||
/**
|
||||
* Centralized IndexedDB Manager
|
||||
* Handles all database initialization, schema management, and migrations
|
||||
* Prevents race conditions and duplicate schema upgrades
|
||||
*/
|
||||
|
||||
export interface DatabaseConfig {
|
||||
name: string;
|
||||
version: number;
|
||||
stores: {
|
||||
name: string;
|
||||
keyPath?: string | string[];
|
||||
autoIncrement?: boolean;
|
||||
indexes?: {
|
||||
name: string;
|
||||
keyPath: string | string[];
|
||||
unique: boolean;
|
||||
}[];
|
||||
}[];
|
||||
}
|
||||
|
||||
class IndexedDBManager {
|
||||
private static instance: IndexedDBManager;
|
||||
private databases = new Map<string, IDBDatabase>();
|
||||
private initPromises = new Map<string, Promise<IDBDatabase>>();
|
||||
|
||||
private constructor() {}
|
||||
|
||||
static getInstance(): IndexedDBManager {
|
||||
if (!IndexedDBManager.instance) {
|
||||
IndexedDBManager.instance = new IndexedDBManager();
|
||||
}
|
||||
return IndexedDBManager.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open or get existing database connection
|
||||
*/
|
||||
async openDatabase(config: DatabaseConfig): Promise<IDBDatabase> {
|
||||
    const existingDb = this.databases.get(config.name);
    if (existingDb) {
      return existingDb;
    }

    const existingPromise = this.initPromises.get(config.name);
    if (existingPromise) {
      return existingPromise;
    }

    const initPromise = this.performDatabaseInit(config);
    this.initPromises.set(config.name, initPromise);

    try {
      const db = await initPromise;
      this.databases.set(config.name, db);
      return db;
    } catch (error) {
      this.initPromises.delete(config.name);
      throw error;
    }
  }

  private performDatabaseInit(config: DatabaseConfig): Promise<IDBDatabase> {
    return new Promise((resolve, reject) => {
      console.log(`Opening IndexedDB: ${config.name} v${config.version}`);
      const request = indexedDB.open(config.name, config.version);

      request.onerror = () => {
        console.error(`Failed to open ${config.name}:`, request.error);
        reject(request.error);
      };

      request.onsuccess = () => {
        const db = request.result;
        console.log(`Successfully opened ${config.name}`);

        // Set up close handler to clean up our references
        db.onclose = () => {
          console.log(`Database ${config.name} closed`);
          this.databases.delete(config.name);
          this.initPromises.delete(config.name);
        };

        resolve(db);
      };

      request.onupgradeneeded = (event) => {
        const db = request.result;
        const oldVersion = event.oldVersion;

        console.log(`Upgrading ${config.name} from v${oldVersion} to v${config.version}`);

        // Create or update object stores
        config.stores.forEach(storeConfig => {
          let store: IDBObjectStore;

          if (db.objectStoreNames.contains(storeConfig.name)) {
            // Store exists - for now, just continue (could add migration logic here)
            console.log(`Object store '${storeConfig.name}' already exists`);
            return;
          }

          // Create new object store
          const options: IDBObjectStoreParameters = {};
          if (storeConfig.keyPath) {
            options.keyPath = storeConfig.keyPath;
          }
          if (storeConfig.autoIncrement) {
            options.autoIncrement = storeConfig.autoIncrement;
          }

          store = db.createObjectStore(storeConfig.name, options);
          console.log(`Created object store '${storeConfig.name}'`);

          // Create indexes
          if (storeConfig.indexes) {
            storeConfig.indexes.forEach(indexConfig => {
              store.createIndex(
                indexConfig.name,
                indexConfig.keyPath,
                { unique: indexConfig.unique }
              );
              console.log(`Created index '${indexConfig.name}' on '${storeConfig.name}'`);
            });
          }
        });
      };
    });
  }

  /**
   * Get database connection (must be already opened)
   */
  getDatabase(name: string): IDBDatabase | null {
    return this.databases.get(name) || null;
  }

  /**
   * Close database connection
   */
  closeDatabase(name: string): void {
    const db = this.databases.get(name);
    if (db) {
      db.close();
      this.databases.delete(name);
      this.initPromises.delete(name);
    }
  }

  /**
   * Close all database connections
   */
  closeAllDatabases(): void {
    this.databases.forEach((db, name) => {
      console.log(`Closing database: ${name}`);
      db.close();
    });
    this.databases.clear();
    this.initPromises.clear();
  }

  /**
   * Delete database completely
   */
  async deleteDatabase(name: string): Promise<void> {
    // Close connection if open
    this.closeDatabase(name);

    return new Promise((resolve, reject) => {
      const deleteRequest = indexedDB.deleteDatabase(name);

      deleteRequest.onerror = () => reject(deleteRequest.error);
      deleteRequest.onsuccess = () => {
        console.log(`Deleted database: ${name}`);
        resolve();
      };
    });
  }

  /**
   * Check if a database exists and what version it is
   */
  async getDatabaseVersion(name: string): Promise<number | null> {
    return new Promise((resolve) => {
      const request = indexedDB.open(name);
      request.onsuccess = () => {
        const db = request.result;
        const version = db.version;
        db.close();
        resolve(version);
      };
      request.onerror = () => resolve(null);
      request.onupgradeneeded = () => {
        // Cancel the upgrade
        request.transaction?.abort();
        resolve(null);
      };
    });
  }
}

// Pre-defined database configurations
export const DATABASE_CONFIGS = {
  FILES: {
    name: 'stirling-pdf-files',
    version: 2,
    stores: [{
      name: 'files',
      keyPath: 'id',
      indexes: [
        { name: 'name', keyPath: 'name', unique: false },
        { name: 'lastModified', keyPath: 'lastModified', unique: false }
      ]
    }]
  } as DatabaseConfig,

  DRAFTS: {
    name: 'stirling-pdf-drafts',
    version: 1,
    stores: [{
      name: 'drafts',
      keyPath: 'id'
    }]
  } as DatabaseConfig
} as const;

export const indexedDBManager = IndexedDBManager.getInstance();
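
A minimal usage sketch of the manager and configs above (illustrative only: the name `initDatabase` for the manager's async open method is an assumption, since that method is declared earlier in the file and not shown in this hunk; `getDatabase`, `DATABASE_CONFIGS`, and the 'files' store are as defined above, and the lookup uses only standard IndexedDB APIs):

// Open the files database once, e.g. at app startup.
// 'initDatabase' is an assumed method name; the real open method is defined above this hunk.
async function bootstrapFilesDb(): Promise<void> {
  await indexedDBManager.initDatabase(DATABASE_CONFIGS.FILES);
}

// Read a stored file record by id using the already-open connection.
function readFileRecord(id: string): Promise<unknown> {
  const db = indexedDBManager.getDatabase(DATABASE_CONFIGS.FILES.name);
  if (!db) {
    return Promise.reject(new Error('files database is not open'));
  }
  return new Promise((resolve, reject) => {
    const tx = db.transaction('files', 'readonly');
    const request = tx.objectStore('files').get(id);
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
}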
@ -7,8 +7,6 @@ export interface FileWithUrl extends File {
  id: string; // Required UUID from FileContext
  url?: string; // Blob URL for display
  thumbnail?: string;
  contentHash?: string; // SHA-256 content hash
  hashStatus?: 'pending' | 'completed' | 'failed';
  storedInIndexedDB?: boolean;
}

@ -22,8 +20,6 @@ export interface FileMetadata {
  size: number;
  lastModified: number;
  thumbnail?: string;
  contentHash?: string;
  hashStatus?: 'pending' | 'completed' | 'failed';
  storedInIndexedDB?: boolean;
}

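For orientation, a sketch of the trimmed metadata shape after this change (the content-hash fields are removed); it uses Partial<> and only the fields visible in this hunk, because the fields declared above the hunk are not shown in the diff:

// Sketch only: the fields above the hunk (e.g. the identifier and name) are omitted here.
const stored: Partial<FileMetadata> = {
  size: 1_204_224,
  lastModified: Date.now(),
  thumbnail: undefined,      // optional preview image
  storedInIndexedDB: true    // persisted via the 'files' object store
};
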
@ -4,6 +4,7 @@

import { ProcessedFile } from './processing';
import { PDFDocument, PDFPage, PageOperation } from './pageEditor';
import { FileMetadata } from './file';

export type ModeType = 'viewer' | 'pageEditor' | 'fileEditor' | 'merge' | 'split' | 'compress' | 'ocr' | 'convert';

@ -16,11 +17,10 @@ export interface FileRecord {
  size: number;
  type: string;
  lastModified: number;
  quickKey: string; // Fast deduplication key: name|size|lastModified
  thumbnailUrl?: string;
  blobUrl?: string;
  createdAt: number;
  contentHash?: string; // Optional content hash for deduplication
  hashStatus?: 'pending' | 'completed' | 'failed'; // Hash computation status
  processedFile?: {
    pages: Array<{
      thumbnail?: string;
@ -50,53 +50,18 @@ export function createFileId(): FileId {
  });
}

// Generate quick deduplication key from file metadata
export function createQuickKey(file: File): string {
  // Format: name|size|lastModified for fast duplicate detection
  return `${file.name}|${file.size}|${file.lastModified}`;
}

// Legacy support - now just delegates to createFileId
export function createStableFileId(file: File): FileId {
  // Don't mutate File objects - always return new UUID
  return createFileId();
}

// Multi-region content hash for deduplication (head + middle + tail)
export async function computeContentHash(file: File): Promise<string | null> {
  try {
    const fileSize = file.size;
    const chunkSize = 32 * 1024; // 32KB chunks
    const chunks: ArrayBuffer[] = [];

    // Head chunk (first 32KB)
    chunks.push(await file.slice(0, Math.min(chunkSize, fileSize)).arrayBuffer());

    // Middle chunk (if file is large enough)
    if (fileSize > chunkSize * 2) {
      const middleStart = Math.floor(fileSize / 2) - Math.floor(chunkSize / 2);
      chunks.push(await file.slice(middleStart, middleStart + chunkSize).arrayBuffer());
    }

    // Tail chunk (last 32KB, if different from head)
    if (fileSize > chunkSize) {
      const tailStart = Math.max(chunkSize, fileSize - chunkSize);
      chunks.push(await file.slice(tailStart, fileSize).arrayBuffer());
    }

    // Combine all chunks
    const totalSize = chunks.reduce((sum, chunk) => sum + chunk.byteLength, 0);
    const combined = new Uint8Array(totalSize);
    let offset = 0;

    for (const chunk of chunks) {
      combined.set(new Uint8Array(chunk), offset);
      offset += chunk.byteLength;
    }

    // Hash the combined chunks
    const hashBuffer = await window.crypto.subtle.digest('SHA-256', combined);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    return hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
  } catch (error) {
    console.warn('Content hash calculation failed:', error);
    return null;
  }
}

export function toFileRecord(file: File, id?: FileId): FileRecord {
  const fileId = id || createStableFileId(file);
@ -106,6 +71,7 @@ export function toFileRecord(file: File, id?: FileId): FileRecord {
    size: file.size,
    type: file.type,
    lastModified: file.lastModified,
    quickKey: createQuickKey(file),
    createdAt: Date.now()
  };
}
@ -225,6 +191,7 @@ export type FileContextAction =
export interface FileContextActions {
  // File management - lightweight actions only
  addFiles: (files: File[]) => Promise<File[]>;
  addStoredFiles: (filesWithMetadata: Array<{ file: File; originalId: FileId; metadata: FileMetadata }>) => Promise<File[]>;
  removeFiles: (fileIds: FileId[], deleteFromStorage?: boolean) => void;
  updateFileRecord: (id: FileId, updates: Partial<FileRecord>) => void;
  clearAllFiles: () => void;

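A short sketch of how the quickKey is intended to be used for deduplication (the helper name `isLikelyDuplicate` and the record list are illustrative, not part of this diff): an incoming File can be checked against already-known FileRecords without reading any file bytes, which is what replaces the removed content-hash path for the common case.

// Hypothetical helper: cheap duplicate check before adding a file again.
function isLikelyDuplicate(candidate: File, existing: FileRecord[]): boolean {
  const key = createQuickKey(candidate); // name|size|lastModified
  return existing.some(record => record.quickKey === key);
}
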
@ -3,7 +3,7 @@
 * Centralizes all PDF operations with proper type safety
 */

import { FileId } from './fileRecord';
import { FileId } from './fileContext';

export type OperationId = string;

@ -26,7 +26,7 @@ export interface BaseOperation {
  createdAt: number;
  startedAt?: number;
  completedAt?: number;
  abortController?: AbortController;1
  abortController?: AbortController;
}

// Split operations

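The optional abortController on BaseOperation is the hook for cancelling in-flight work. A minimal sketch of that flow (the operation object and the processing loop are illustrative, not from this diff; only standard AbortController/AbortSignal APIs are used):

// Illustrative cancellation flow built on BaseOperation.abortController.
const controller = new AbortController();

const op: Partial<BaseOperation> = {
  createdAt: Date.now(),
  abortController: controller
  // ...remaining BaseOperation fields omitted in this sketch
};

async function processPages(signal: AbortSignal): Promise<void> {
  for (let page = 0; page < 100; page++) {
    if (signal.aborted) {
      throw new DOMException('Operation cancelled', 'AbortError');
    }
    // ...process one page
  }
}

// Start work with the controller's signal; cancel from the UI when needed.
const work = processPages(controller.signal);
op.abortController?.abort();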