Added versions to file editor

Connor Yoh 2025-09-02 18:15:13 +01:00
parent d4e0fb581f
commit 16a0a6dafe
7 changed files with 349 additions and 46 deletions

View File

@@ -51,7 +51,7 @@ const FileEditorThumbnail = ({
isSupported = true,
}: FileEditorThumbnailProps) => {
const { t } = useTranslation();
const { pinFile, unpinFile, isFilePinned, activeFiles } = useFileContext();
const { pinFile, unpinFile, isFilePinned, activeFiles, selectors } = useFileContext();
// ---- Drag state ----
const [isDragging, setIsDragging] = useState(false);
@@ -65,6 +65,13 @@ const FileEditorThumbnail = ({
}, [activeFiles, file.name, file.size]);
const isPinned = actualFile ? isFilePinned(actualFile) : false;
// Get file record to access tool history
const fileRecord = selectors.getFileRecord(file.id);
const toolHistory = fileRecord?.toolHistory || [];
const hasToolHistory = toolHistory.length > 0;
const versionNumber = fileRecord?.versionNumber || 0;
const downloadSelectedFile = useCallback(() => {
// Prefer parent-provided handler if available
if (typeof onDownloadFile === 'function') {
@@ -351,7 +358,8 @@ const FileEditorThumbnail = ({
lineClamp={3}
title={`${extUpper || 'FILE'}${prettySize}`}
>
{/* e.g., Jan 29, 2025 - PDF file - 3 Pages */}
{/* e.g., v2 - Jan 29, 2025 - PDF file - 3 Pages */}
{hasToolHistory ? ` v${versionNumber} - ` : ''}
{dateLabel}
{extUpper ? ` - ${extUpper} file` : ''}
{pageLabel ? ` - ${pageLabel}` : ''}
@@ -400,6 +408,26 @@ const FileEditorThumbnail = ({
<span ref={handleRef} className={styles.dragHandle} aria-hidden>
<DragIndicatorIcon fontSize="small" />
</span>
{/* Tool chain display at bottom */}
{hasToolHistory && (
<div style={{
position: 'absolute',
bottom: '4px',
left: '4px',
right: '4px',
padding: '4px 6px',
fontSize: '0.75rem',
textAlign: 'center',
color: 'var(--mantine-color-gray-7)',
fontWeight: 600,
overflow: 'hidden',
textOverflow: 'ellipsis',
whiteSpace: 'nowrap'
}}>
{toolHistory.map(tool => tool.toolName).join(' → ')}
</div>
)}
</div>
</div>
);

View File

@@ -142,10 +142,40 @@ export function IndexedDBProvider({ children }: IndexedDBProviderProps) {
const loadAllMetadata = useCallback(async (): Promise<FileMetadata[]> => {
const metadata = await fileStorage.getAllFileMetadata();
// For each PDF file, extract history metadata
const metadataWithHistory = await Promise.all(metadata.map(async (m) => {
// For non-PDF files, return basic metadata
if (!m.type.includes('pdf')) {
// Separate PDF and non-PDF files for different processing
const pdfFiles = metadata.filter(m => m.type.includes('pdf'));
const nonPdfFiles = metadata.filter(m => !m.type.includes('pdf'));
// Process non-PDF files immediately (no history extraction needed)
const nonPdfMetadata: FileMetadata[] = nonPdfFiles.map(m => ({
id: m.id,
name: m.name,
type: m.type,
size: m.size,
lastModified: m.lastModified,
thumbnail: m.thumbnail
}));
// Process PDF files with controlled concurrency to avoid memory issues
const BATCH_SIZE = 5; // Process 5 PDFs at a time to avoid overwhelming memory
const pdfMetadata: FileMetadata[] = [];
for (let i = 0; i < pdfFiles.length; i += BATCH_SIZE) {
const batch = pdfFiles.slice(i, i + BATCH_SIZE);
const batchResults = await Promise.all(batch.map(async (m) => {
try {
// For PDF files, load and extract history with timeout
const storedFile = await fileStorage.getFile(m.id);
if (storedFile?.data) {
const file = new File([storedFile.data], m.name, { type: m.type });
return await createFileMetadataWithHistory(file, m.id, m.thumbnail);
}
} catch (error) {
if (DEBUG) console.warn('🗂️ Failed to extract history from stored file:', m.name, error);
}
// Fallback to basic metadata if history extraction fails
return {
id: m.id,
name: m.name,
@@ -154,34 +184,12 @@ export function IndexedDBProvider({ children }: IndexedDBProviderProps) {
lastModified: m.lastModified,
thumbnail: m.thumbnail
};
}
}));
pdfMetadata.push(...batchResults);
}
try {
// For PDF files, load and extract history
const storedFile = await fileStorage.getFile(m.id);
if (storedFile?.data) {
const file = new File([storedFile.data], m.name, { type: m.type });
const enhancedMetadata = await createFileMetadataWithHistory(file, m.id, m.thumbnail);
return enhancedMetadata;
}
} catch (error) {
if (DEBUG) console.warn('🗂️ IndexedDB.loadAllMetadata: Failed to extract history from stored file:', m.name, error);
}
// Fallback to basic metadata if history extraction fails
return {
id: m.id,
name: m.name,
type: m.type,
size: m.size,
lastModified: m.lastModified,
thumbnail: m.thumbnail
};
}));
return metadataWithHistory;
return [...nonPdfMetadata, ...pdfMetadata];
}, []);
const deleteMultiple = useCallback(async (fileIds: FileId[]): Promise<void> => {
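Note: the batched loop above keeps at most BATCH_SIZE stored PDFs in memory at once instead of loading every file in parallel. A minimal sketch of that concurrency pattern in isolation, with a hypothetical generic worker (processItem and the item type are illustrative, not part of this commit):

async function processInBatches<T, R>(
  items: T[],
  batchSize: number,
  processItem: (item: T) => Promise<R>
): Promise<R[]> {
  const results: R[] = [];
  for (let i = 0; i < items.length; i += batchSize) {
    // Each batch is awaited before the next starts, so at most
    // `batchSize` promises (and loaded files) exist at a time.
    const batch = items.slice(i, i + batchSize);
    results.push(...(await Promise.all(batch.map(processItem))));
  }
  return results;
}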

View File

@@ -125,16 +125,18 @@ export function fileContextReducer(state: FileContextState, action: FileContextA
return state; // File doesn't exist, no-op
}
const updatedRecord = {
...existingRecord,
...updates
};
return {
...state,
files: {
...state.files,
byId: {
...state.files.byId,
[id]: {
...existingRecord,
...updates
}
[id]: updatedRecord
}
}
};

View File

@@ -202,7 +202,7 @@ export async function addFiles(
});
}
}).catch(error => {
if (DEBUG) console.warn(`📄 addFiles(raw): Failed to extract history for ${file.name}:`, error);
if (DEBUG) console.warn(`📄 Failed to extract history for ${file.name}:`, error);
});
existingQuickKeys.add(quickKey);
@@ -248,9 +248,18 @@ export async function addFiles(
}
// Extract file history from PDF metadata (async)
if (DEBUG) console.log(`📄 addFiles(processed): Starting async history extraction for ${file.name}`);
extractFileHistory(file, record).then(updatedRecord => {
if (DEBUG) console.log(`📄 addFiles(processed): History extraction completed for ${file.name}:`, {
hasChanges: updatedRecord !== record,
originalFileId: updatedRecord.originalFileId,
versionNumber: updatedRecord.versionNumber,
toolHistoryLength: updatedRecord.toolHistory?.length || 0
});
if (updatedRecord !== record && (updatedRecord.originalFileId || updatedRecord.versionNumber)) {
// History was found, dispatch update to trigger re-render
if (DEBUG) console.log(`📄 addFiles(processed): Dispatching UPDATE_FILE_RECORD for ${file.name}`);
dispatch({
type: 'UPDATE_FILE_RECORD',
payload: {
@@ -263,9 +272,11 @@ export async function addFiles(
}
}
});
} else {
if (DEBUG) console.log(`📄 addFiles(processed): No history found for ${file.name}, skipping update`);
}
}).catch(error => {
if (DEBUG) console.warn(`📄 addFiles(processed): Failed to extract history for ${file.name}:`, error);
if (DEBUG) console.error(`📄 addFiles(processed): Failed to extract history for ${file.name}:`, error);
});
existingQuickKeys.add(quickKey);
@@ -343,6 +354,27 @@ export async function addFiles(
if (DEBUG) console.log(`📄 addFiles(stored): Created processedFile metadata for ${file.name} with ${pageCount} pages`);
}
// Extract file history from PDF metadata (async) - same as raw files
extractFileHistory(file, record).then(updatedRecord => {
if (updatedRecord !== record && (updatedRecord.originalFileId || updatedRecord.versionNumber)) {
// History was found, dispatch update to trigger re-render
dispatch({
type: 'UPDATE_FILE_RECORD',
payload: {
id: fileId,
updates: {
originalFileId: updatedRecord.originalFileId,
versionNumber: updatedRecord.versionNumber,
parentFileId: updatedRecord.parentFileId,
toolHistory: updatedRecord.toolHistory
}
}
});
}
}).catch(error => {
if (DEBUG) console.warn(`📄 Failed to extract history for ${file.name}:`, error);
});
existingQuickKeys.add(quickKey);
fileRecords.push(record);
addedFiles.push({ file, id: fileId, thumbnail: metadata.thumbnail });
@@ -399,6 +431,25 @@ async function processFilesIntoRecords(
record.processedFile = createProcessedFile(pageCount, thumbnail);
}
// Extract file history from PDF metadata (synchronous during consumeFiles)
if (file.type.includes('pdf')) {
try {
const updatedRecord = await extractFileHistory(file, record);
if (updatedRecord !== record && (updatedRecord.originalFileId || updatedRecord.versionNumber)) {
// Update the record directly with history data
Object.assign(record, {
originalFileId: updatedRecord.originalFileId,
versionNumber: updatedRecord.versionNumber,
parentFileId: updatedRecord.parentFileId,
toolHistory: updatedRecord.toolHistory
});
}
} catch (error) {
if (DEBUG) console.warn(`📄 Failed to extract history for ${file.name}:`, error);
}
}
return { record, file, fileId, thumbnail };
})
);
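Note: the addFiles branches run extractFileHistory fire-and-forget and dispatch UPDATE_FILE_RECORD only when history is actually found, while processFilesIntoRecords awaits it and copies the fields onto the record directly. A condensed, hypothetical sketch of the fire-and-forget variant (the types and signatures below are simplified stand-ins for the real FileRecord and action shapes):

type HistoryFields = { originalFileId?: string; versionNumber?: number; parentFileId?: string; toolHistory?: unknown[] };

function applyHistoryAsync(
  file: File,
  record: HistoryFields,
  fileId: string,
  extract: (f: File, r: HistoryFields) => Promise<HistoryFields>,
  dispatch: (action: { type: 'UPDATE_FILE_RECORD'; payload: { id: string; updates: Partial<HistoryFields> } }) => void
): void {
  extract(file, record)
    .then((updated) => {
      // Dispatch only when extraction produced new history, so files
      // without stirling-history metadata do not trigger a re-render.
      if (updated !== record && (updated.originalFileId || updated.versionNumber)) {
        dispatch({ type: 'UPDATE_FILE_RECORD', payload: { id: fileId, updates: updated } });
      }
    })
    .catch(() => {
      // Extraction failures are non-fatal; the record keeps its basic metadata.
    });
}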

View File

@@ -29,7 +29,7 @@ export class PDFExportService {
// Load original PDF and create new document
const originalPDFBytes = await pdfDocument.file.arrayBuffer();
const sourceDoc = await PDFLibDocument.load(originalPDFBytes);
const sourceDoc = await PDFLibDocument.load(originalPDFBytes, { ignoreEncryption: true });
const blob = await this.createSingleDocument(sourceDoc, pagesToExport);
const exportFilename = this.generateFilename(filename || pdfDocument.name, selectedOnly, false);
@@ -86,7 +86,7 @@ export class PDFExportService {
for (const [fileId, file] of sourceFiles) {
try {
const arrayBuffer = await file.arrayBuffer();
const doc = await PDFLibDocument.load(arrayBuffer);
const doc = await PDFLibDocument.load(arrayBuffer, { ignoreEncryption: true });
loadedDocs.set(fileId, doc);
} catch (error) {
console.warn(`Failed to load source file ${fileId}:`, error);

View File

@@ -8,6 +8,7 @@
import { PDFDocument } from 'pdf-lib';
import { FileId } from '../types/file';
import { ContentCache, type CacheConfig } from '../utils/ContentCache';
const DEBUG = process.env.NODE_ENV === 'development';
@@ -42,6 +43,21 @@ export interface PDFHistoryMetadata {
export class PDFMetadataService {
private static readonly HISTORY_KEYWORD = 'stirling-history';
private static readonly FORMAT_VERSION = '1.0';
private metadataCache: ContentCache<PDFHistoryMetadata | null>;
constructor(cacheConfig?: Partial<CacheConfig>) {
const defaultConfig: CacheConfig = {
ttl: 5 * 60 * 1000, // 5 minutes
maxSize: 100, // 100 files
enableWarnings: DEBUG
};
this.metadataCache = new ContentCache<PDFHistoryMetadata | null>({
...defaultConfig,
...cacheConfig
});
}
/**
* Inject file history metadata into a PDF
@@ -54,7 +70,7 @@ export class PDFMetadataService {
versionNumber: number = 1
): Promise<ArrayBuffer> {
try {
const pdfDoc = await PDFDocument.load(pdfBytes);
const pdfDoc = await PDFDocument.load(pdfBytes, { ignoreEncryption: true });
const historyMetadata: PDFHistoryMetadata = {
stirlingHistory: {
@@ -120,8 +136,29 @@ export class PDFMetadataService {
* Extract file history metadata from a PDF
*/
async extractHistoryMetadata(pdfBytes: ArrayBuffer): Promise<PDFHistoryMetadata | null> {
const cacheKey = this.metadataCache.generateKeyFromBuffer(pdfBytes);
// Check cache first
const cached = this.metadataCache.get(cacheKey);
if (cached !== null) {
return cached;
}
// Extract from PDF
const metadata = await this.extractHistoryMetadataInternal(pdfBytes);
// Cache the result
this.metadataCache.set(cacheKey, metadata);
return metadata;
}
/**
* Internal method for actual PDF metadata extraction
*/
private async extractHistoryMetadataInternal(pdfBytes: ArrayBuffer): Promise<PDFHistoryMetadata | null> {
try {
const pdfDoc = await PDFDocument.load(pdfBytes);
const pdfDoc = await PDFDocument.load(pdfBytes, { ignoreEncryption: true });
const keywords = pdfDoc.getKeywords();
// Look for history keyword directly in array or convert to string
@@ -167,7 +204,7 @@ export class PDFMetadataService {
return metadata;
} catch (error) {
if (DEBUG) console.error('📄 pdfMetadataService.extractHistoryMetadata: Failed to extract:', error);
if (DEBUG) console.error('📄 Failed to extract PDF metadata:', error);
return null;
}
}
@@ -327,5 +364,9 @@ export class PDFMetadataService {
}
}
// Export singleton instance
export const pdfMetadataService = new PDFMetadataService();
// Export singleton instance with optimized cache settings
export const pdfMetadataService = new PDFMetadataService({
ttl: 10 * 60 * 1000, // 10 minutes for PDF metadata (longer than default)
maxSize: 50, // Smaller cache for memory efficiency
enableWarnings: DEBUG
});
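Note: with the cache-backed service exported as a singleton, repeated extractions over the same bytes only parse the PDF once within the 10-minute TTL. A brief usage sketch (the import path and surrounding file handling are assumptions, not part of the diff):

import { pdfMetadataService } from './pdfMetadataService'; // path assumed

async function readToolHistory(file: File) {
  const bytes = await file.arrayBuffer();
  // First call parses the PDF; later calls with the same content are
  // answered from the ContentCache until the entry expires or is evicted.
  const history = await pdfMetadataService.extractHistoryMetadata(bytes);
  return history?.stirlingHistory ?? null;
}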

View File

@@ -0,0 +1,173 @@
/**
* Generic content cache with TTL and size limits
* Reusable for any cached data with configurable parameters
*/
const DEBUG = process.env.NODE_ENV === 'development';
interface CacheEntry<T> {
value: T;
timestamp: number;
}
export interface CacheConfig {
/** Time-to-live in milliseconds */
ttl: number;
/** Maximum number of cache entries */
maxSize: number;
/** Enable cleanup warnings in development */
enableWarnings?: boolean;
}
export class ContentCache<T> {
private cache = new Map<string, CacheEntry<T>>();
private hits = 0;
private misses = 0;
constructor(private readonly config: CacheConfig) {}
/**
* Get cached value if valid
*/
get(key: string): T | null {
const entry = this.cache.get(key);
if (!entry) {
this.misses++;
return null;
}
// Check if expired
if (Date.now() - entry.timestamp > this.config.ttl) {
this.cache.delete(key);
this.misses++;
return null;
}
this.hits++;
return entry.value;
}
/**
* Set cached value
*/
set(key: string, value: T): void {
// Clean up before adding if at capacity
if (this.cache.size >= this.config.maxSize) {
this.evictOldest();
}
this.cache.set(key, {
value,
timestamp: Date.now()
});
}
/**
* Generate cache key from ArrayBuffer content
*/
generateKeyFromBuffer(data: ArrayBuffer): string {
// Use file size + hash of first/last bytes as cache key
const view = new Uint8Array(data);
const size = data.byteLength;
const start = Array.from(view.slice(0, 16)).join(',');
const end = Array.from(view.slice(-16)).join(',');
return `${size}-${this.simpleHash(start + end)}`;
}
/**
* Generate cache key from string content
*/
generateKeyFromString(content: string): string {
return this.simpleHash(content);
}
/**
* Check if key exists and is valid
*/
has(key: string): boolean {
return this.get(key) !== null;
}
/**
* Clear all cache entries
*/
clear(): void {
this.cache.clear();
this.hits = 0;
this.misses = 0;
}
/**
* Get cache statistics
*/
getStats(): {
size: number;
maxSize: number;
hitRate: number;
hits: number;
misses: number;
} {
const total = this.hits + this.misses;
const hitRate = total > 0 ? this.hits / total : 0;
return {
size: this.cache.size,
maxSize: this.config.maxSize,
hitRate,
hits: this.hits,
misses: this.misses
};
}
/**
* Cleanup expired entries
*/
cleanup(): void {
const now = Date.now();
let cleaned = 0;
for (const [key, entry] of this.cache.entries()) {
if (now - entry.timestamp > this.config.ttl) {
this.cache.delete(key);
cleaned++;
}
}
if (DEBUG && this.config.enableWarnings && this.cache.size > this.config.maxSize * 0.8) {
console.warn(`📦 ContentCache: High cache usage (${this.cache.size}/${this.config.maxSize}), cleaned ${cleaned} expired entries`);
}
}
/**
* Evict oldest entry when at capacity
*/
private evictOldest(): void {
let oldestKey: string | null = null;
let oldestTime = Date.now();
for (const [key, entry] of this.cache.entries()) {
if (entry.timestamp < oldestTime) {
oldestTime = entry.timestamp;
oldestKey = key;
}
}
if (oldestKey) {
this.cache.delete(oldestKey);
}
}
/**
* Simple hash function for cache keys
*/
private simpleHash(str: string): string {
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
}
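Note: the cache is generic, so it can be exercised on its own. A small, hypothetical usage sketch (keys and values here are illustrative):

const cache = new ContentCache<string>({ ttl: 60_000, maxSize: 100, enableWarnings: true });

const key = cache.generateKeyFromString('some expensive input');
if (!cache.has(key)) {
  // Cache miss (or expired entry): compute and store the result.
  cache.set(key, 'expensive result');
}
console.log(cache.get(key));   // 'expensive result' until the 60s TTL lapses
console.log(cache.getStats()); // { size, maxSize, hitRate, hits, misses }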