mirror of
https://github.com/Stirling-Tools/Stirling-PDF.git
synced 2025-09-24 04:26:14 +00:00
Compare commits
4 Commits
307f960a8a
...
9c49cf9584
Author | SHA1 | Date | |
---|---|---|---|
![]() |
9c49cf9584 | ||
![]() |
18097a6d9b | ||
![]() |
be31da217b | ||
![]() |
cfdb6eaa1e |
@ -1468,7 +1468,6 @@
|
|||||||
"submit": "Submit"
|
"submit": "Submit"
|
||||||
},
|
},
|
||||||
"scalePages": {
|
"scalePages": {
|
||||||
"tags": "resize,modify,dimension,adapt",
|
|
||||||
"title": "Adjust page-scale",
|
"title": "Adjust page-scale",
|
||||||
"header": "Adjust page-scale",
|
"header": "Adjust page-scale",
|
||||||
"pageSize": "Size of a page of the document.",
|
"pageSize": "Size of a page of the document.",
|
||||||
@ -1476,6 +1475,44 @@
|
|||||||
"scaleFactor": "Zoom level (crop) of a page.",
|
"scaleFactor": "Zoom level (crop) of a page.",
|
||||||
"submit": "Submit"
|
"submit": "Submit"
|
||||||
},
|
},
|
||||||
|
"adjustPageScale": {
|
||||||
|
"tags": "resize,modify,dimension,adapt",
|
||||||
|
"title": "Adjust Page Scale",
|
||||||
|
"header": "Adjust Page Scale",
|
||||||
|
"scaleFactor": {
|
||||||
|
"label": "Scale Factor"
|
||||||
|
},
|
||||||
|
"pageSize": {
|
||||||
|
"label": "Target Page Size",
|
||||||
|
"keep": "Keep Original Size",
|
||||||
|
"letter": "Letter",
|
||||||
|
"legal": "Legal"
|
||||||
|
},
|
||||||
|
"submit": "Adjust Page Scale",
|
||||||
|
"error": {
|
||||||
|
"failed": "An error occurred while adjusting the page scale."
|
||||||
|
},
|
||||||
|
"tooltip": {
|
||||||
|
"header": {
|
||||||
|
"title": "Page Scale Settings Overview"
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"title": "Description",
|
||||||
|
"text": "Adjust the size of PDF content and change the page dimensions."
|
||||||
|
},
|
||||||
|
"scaleFactor": {
|
||||||
|
"title": "Scale Factor",
|
||||||
|
"text": "Controls how large or small the content appears on the page. Content is scaled and centred - if scaled content is larger than the page size, it may be cropped.",
|
||||||
|
"bullet1": "1.0 = Original size",
|
||||||
|
"bullet2": "0.5 = Half size (50% smaller)",
|
||||||
|
"bullet3": "2.0 = Double size (200% larger, may crop)"
|
||||||
|
},
|
||||||
|
"pageSize": {
|
||||||
|
"title": "Target Page Size",
|
||||||
|
"text": "Sets the dimensions of the output PDF pages. 'Keep Original Size' maintains current dimensions, whilst other options resize to standard paper sizes."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"add-page-numbers": {
|
"add-page-numbers": {
|
||||||
"tags": "paginate,label,organize,index"
|
"tags": "paginate,label,organize,index"
|
||||||
},
|
},
|
||||||
|
@ -0,0 +1,64 @@
|
|||||||
|
import { describe, expect, test, vi, beforeEach } from 'vitest';
|
||||||
|
import { render, screen } from '@testing-library/react';
|
||||||
|
import { MantineProvider } from '@mantine/core';
|
||||||
|
import AdjustPageScaleSettings from './AdjustPageScaleSettings';
|
||||||
|
import { AdjustPageScaleParameters, PageSize } from '../../../hooks/tools/adjustPageScale/useAdjustPageScaleParameters';
|
||||||
|
|
||||||
|
// Mock useTranslation with predictable return values
|
||||||
|
const mockT = vi.fn((key: string, fallback?: string) => fallback || `mock-${key}`);
|
||||||
|
vi.mock('react-i18next', () => ({
|
||||||
|
useTranslation: () => ({ t: mockT })
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Wrapper component to provide Mantine context
|
||||||
|
const TestWrapper = ({ children }: { children: React.ReactNode }) => (
|
||||||
|
<MantineProvider>{children}</MantineProvider>
|
||||||
|
);
|
||||||
|
|
||||||
|
describe('AdjustPageScaleSettings', () => {
|
||||||
|
const defaultParameters: AdjustPageScaleParameters = {
|
||||||
|
scaleFactor: 1.0,
|
||||||
|
pageSize: PageSize.KEEP,
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockOnParameterChange = vi.fn();
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should render without crashing', () => {
|
||||||
|
render(
|
||||||
|
<TestWrapper>
|
||||||
|
<AdjustPageScaleSettings
|
||||||
|
parameters={defaultParameters}
|
||||||
|
onParameterChange={mockOnParameterChange}
|
||||||
|
/>
|
||||||
|
</TestWrapper>
|
||||||
|
);
|
||||||
|
|
||||||
|
// Basic render test - component renders without throwing
|
||||||
|
expect(screen.getByText('Scale Factor')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Target Page Size')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should render with custom parameters', () => {
|
||||||
|
const customParameters: AdjustPageScaleParameters = {
|
||||||
|
scaleFactor: 2.5,
|
||||||
|
pageSize: PageSize.A4,
|
||||||
|
};
|
||||||
|
|
||||||
|
render(
|
||||||
|
<TestWrapper>
|
||||||
|
<AdjustPageScaleSettings
|
||||||
|
parameters={customParameters}
|
||||||
|
onParameterChange={mockOnParameterChange}
|
||||||
|
/>
|
||||||
|
</TestWrapper>
|
||||||
|
);
|
||||||
|
|
||||||
|
// Component renders successfully with custom parameters
|
||||||
|
expect(screen.getByText('Scale Factor')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Target Page Size')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
@ -0,0 +1,55 @@
|
|||||||
|
import { Stack, NumberInput, Select } from "@mantine/core";
|
||||||
|
import { useTranslation } from "react-i18next";
|
||||||
|
import { AdjustPageScaleParameters, PageSize } from "../../../hooks/tools/adjustPageScale/useAdjustPageScaleParameters";
|
||||||
|
|
||||||
|
interface AdjustPageScaleSettingsProps {
|
||||||
|
parameters: AdjustPageScaleParameters;
|
||||||
|
onParameterChange: <K extends keyof AdjustPageScaleParameters>(key: K, value: AdjustPageScaleParameters[K]) => void;
|
||||||
|
disabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const AdjustPageScaleSettings = ({ parameters, onParameterChange, disabled = false }: AdjustPageScaleSettingsProps) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
|
const pageSizeOptions = [
|
||||||
|
{ value: PageSize.KEEP, label: t('adjustPageScale.pageSize.keep', 'Keep Original Size') },
|
||||||
|
{ value: PageSize.A0, label: 'A0' },
|
||||||
|
{ value: PageSize.A1, label: 'A1' },
|
||||||
|
{ value: PageSize.A2, label: 'A2' },
|
||||||
|
{ value: PageSize.A3, label: 'A3' },
|
||||||
|
{ value: PageSize.A4, label: 'A4' },
|
||||||
|
{ value: PageSize.A5, label: 'A5' },
|
||||||
|
{ value: PageSize.A6, label: 'A6' },
|
||||||
|
{ value: PageSize.LETTER, label: t('adjustPageScale.pageSize.letter', 'Letter') },
|
||||||
|
{ value: PageSize.LEGAL, label: t('adjustPageScale.pageSize.legal', 'Legal') },
|
||||||
|
];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Stack gap="md">
|
||||||
|
<NumberInput
|
||||||
|
label={t('adjustPageScale.scaleFactor.label', 'Scale Factor')}
|
||||||
|
value={parameters.scaleFactor}
|
||||||
|
onChange={(value) => onParameterChange('scaleFactor', typeof value === 'number' ? value : 1.0)}
|
||||||
|
min={0.1}
|
||||||
|
max={10.0}
|
||||||
|
step={0.1}
|
||||||
|
decimalScale={2}
|
||||||
|
disabled={disabled}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<Select
|
||||||
|
label={t('adjustPageScale.pageSize.label', 'Target Page Size')}
|
||||||
|
value={parameters.pageSize}
|
||||||
|
onChange={(value) => {
|
||||||
|
if (value && Object.values(PageSize).includes(value as PageSize)) {
|
||||||
|
onParameterChange('pageSize', value as PageSize);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
data={pageSizeOptions}
|
||||||
|
disabled={disabled}
|
||||||
|
/>
|
||||||
|
</Stack>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default AdjustPageScaleSettings;
|
31
frontend/src/components/tooltips/useAdjustPageScaleTips.ts
Normal file
31
frontend/src/components/tooltips/useAdjustPageScaleTips.ts
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { TooltipContent } from '../../types/tips';
|
||||||
|
|
||||||
|
export const useAdjustPageScaleTips = (): TooltipContent => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
|
return {
|
||||||
|
header: {
|
||||||
|
title: t("adjustPageScale.tooltip.header.title", "Page Scale Settings Overview")
|
||||||
|
},
|
||||||
|
tips: [
|
||||||
|
{
|
||||||
|
title: t("adjustPageScale.tooltip.description.title", "Description"),
|
||||||
|
description: t("adjustPageScale.tooltip.description.text", "Adjust the size of PDF content and change the page dimensions.")
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t("adjustPageScale.tooltip.scaleFactor.title", "Scale Factor"),
|
||||||
|
description: t("adjustPageScale.tooltip.scaleFactor.text", "Controls how large or small the content appears on the page. Content is scaled and centered - if scaled content is larger than the page size, it may be cropped."),
|
||||||
|
bullets: [
|
||||||
|
t("adjustPageScale.tooltip.scaleFactor.bullet1", "1.0 = Original size"),
|
||||||
|
t("adjustPageScale.tooltip.scaleFactor.bullet2", "0.5 = Half size (50% smaller)"),
|
||||||
|
t("adjustPageScale.tooltip.scaleFactor.bullet3", "2.0 = Double size (200% larger, may crop)")
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t("adjustPageScale.tooltip.pageSize.title", "Target Page Size"),
|
||||||
|
description: t("adjustPageScale.tooltip.pageSize.text", "Sets the dimensions of the output PDF pages. 'Keep Original Size' maintains current dimensions, while other options resize to standard paper sizes.")
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
};
|
@ -14,9 +14,9 @@ interface FileManagerContextValue {
|
|||||||
selectedFiles: StirlingFileStub[];
|
selectedFiles: StirlingFileStub[];
|
||||||
filteredFiles: StirlingFileStub[];
|
filteredFiles: StirlingFileStub[];
|
||||||
fileInputRef: React.RefObject<HTMLInputElement | null>;
|
fileInputRef: React.RefObject<HTMLInputElement | null>;
|
||||||
selectedFilesSet: Set<string>;
|
selectedFilesSet: Set<FileId>;
|
||||||
expandedFileIds: Set<string>;
|
expandedFileIds: Set<FileId>;
|
||||||
fileGroups: Map<string, StirlingFileStub[]>;
|
fileGroups: Map<FileId, StirlingFileStub[]>;
|
||||||
loadedHistoryFiles: Map<FileId, StirlingFileStub[]>;
|
loadedHistoryFiles: Map<FileId, StirlingFileStub[]>;
|
||||||
|
|
||||||
// Handlers
|
// Handlers
|
||||||
@ -76,7 +76,7 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
const [selectedFileIds, setSelectedFileIds] = useState<FileId[]>([]);
|
const [selectedFileIds, setSelectedFileIds] = useState<FileId[]>([]);
|
||||||
const [searchTerm, setSearchTerm] = useState('');
|
const [searchTerm, setSearchTerm] = useState('');
|
||||||
const [lastClickedIndex, setLastClickedIndex] = useState<number | null>(null);
|
const [lastClickedIndex, setLastClickedIndex] = useState<number | null>(null);
|
||||||
const [expandedFileIds, setExpandedFileIds] = useState<Set<string>>(new Set());
|
const [expandedFileIds, setExpandedFileIds] = useState<Set<FileId>>(new Set());
|
||||||
const [loadedHistoryFiles, setLoadedHistoryFiles] = useState<Map<FileId, StirlingFileStub[]>>(new Map()); // Cache for loaded history
|
const [loadedHistoryFiles, setLoadedHistoryFiles] = useState<Map<FileId, StirlingFileStub[]>>(new Map()); // Cache for loaded history
|
||||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
@ -173,12 +173,12 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
|
|
||||||
// Helper function to safely determine which files can be deleted
|
// Helper function to safely determine which files can be deleted
|
||||||
const getSafeFilesToDelete = useCallback((
|
const getSafeFilesToDelete = useCallback((
|
||||||
fileIds: string[],
|
fileIds: FileId[],
|
||||||
allStoredStubs: StirlingFileStub[]
|
allStoredStubs: StirlingFileStub[]
|
||||||
): string[] => {
|
): FileId[] => {
|
||||||
const fileMap = new Map(allStoredStubs.map(f => [f.id as string, f]));
|
const fileMap = new Map(allStoredStubs.map(f => [f.id, f]));
|
||||||
const filesToDelete = new Set<string>();
|
const filesToDelete = new Set<FileId>();
|
||||||
const filesToPreserve = new Set<string>();
|
const filesToPreserve = new Set<FileId>();
|
||||||
|
|
||||||
// First, identify all files in the lineages of the leaf files being deleted
|
// First, identify all files in the lineages of the leaf files being deleted
|
||||||
for (const leafFileId of fileIds) {
|
for (const leafFileId of fileIds) {
|
||||||
@ -222,8 +222,8 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
let safeToDelete = Array.from(filesToDelete).filter(fileId => !filesToPreserve.has(fileId));
|
let safeToDelete = Array.from(filesToDelete).filter(fileId => !filesToPreserve.has(fileId));
|
||||||
|
|
||||||
// Check for orphaned non-leaf files after main deletion
|
// Check for orphaned non-leaf files after main deletion
|
||||||
const remainingFiles = allStoredStubs.filter(file => !safeToDelete.includes(file.id as string));
|
const remainingFiles = allStoredStubs.filter(file => !safeToDelete.includes(file.id));
|
||||||
const orphanedNonLeafFiles: string[] = [];
|
const orphanedNonLeafFiles: FileId[] = [];
|
||||||
|
|
||||||
for (const file of remainingFiles) {
|
for (const file of remainingFiles) {
|
||||||
// Only check non-leaf files (files that have been processed and have children)
|
// Only check non-leaf files (files that have been processed and have children)
|
||||||
@ -243,7 +243,7 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (!hasLivingDescendants) {
|
if (!hasLivingDescendants) {
|
||||||
orphanedNonLeafFiles.push(file.id as string);
|
orphanedNonLeafFiles.push(file.id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -251,13 +251,6 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
// Add orphaned non-leaf files to deletion list
|
// Add orphaned non-leaf files to deletion list
|
||||||
safeToDelete = [...safeToDelete, ...orphanedNonLeafFiles];
|
safeToDelete = [...safeToDelete, ...orphanedNonLeafFiles];
|
||||||
|
|
||||||
console.log('Deletion analysis:', {
|
|
||||||
candidatesForDeletion: Array.from(filesToDelete),
|
|
||||||
mustPreserve: Array.from(filesToPreserve),
|
|
||||||
orphanedNonLeafFiles,
|
|
||||||
safeToDelete
|
|
||||||
});
|
|
||||||
|
|
||||||
return safeToDelete;
|
return safeToDelete;
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
@ -269,9 +262,7 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
const allStoredStubs = await fileStorage.getAllStirlingFileStubs();
|
const allStoredStubs = await fileStorage.getAllStirlingFileStubs();
|
||||||
|
|
||||||
// Get safe files to delete (respecting shared lineages)
|
// Get safe files to delete (respecting shared lineages)
|
||||||
const filesToDelete = getSafeFilesToDelete([deletedFileId as string], allStoredStubs);
|
const filesToDelete = getSafeFilesToDelete([deletedFileId], allStoredStubs);
|
||||||
|
|
||||||
console.log(`Safely deleting files for ${fileToRemove.name}:`, filesToDelete);
|
|
||||||
|
|
||||||
// Clear from selection immediately
|
// Clear from selection immediately
|
||||||
setSelectedFileIds(prev => prev.filter(id => !filesToDelete.includes(id)));
|
setSelectedFileIds(prev => prev.filter(id => !filesToDelete.includes(id)));
|
||||||
@ -292,7 +283,7 @@ export const FileManagerProvider: React.FC<FileManagerProviderProps> = ({
|
|||||||
|
|
||||||
// Also remove deleted files from any other file's history cache
|
// Also remove deleted files from any other file's history cache
|
||||||
for (const [mainFileId, historyFiles] of newCache.entries()) {
|
for (const [mainFileId, historyFiles] of newCache.entries()) {
|
||||||
const filteredHistory = historyFiles.filter(histFile => !filesToDelete.includes(histFile.id as string));
|
const filteredHistory = historyFiles.filter(histFile => !filesToDelete.includes(histFile.id));
|
||||||
if (filteredHistory.length !== historyFiles.length) {
|
if (filteredHistory.length !== historyFiles.length) {
|
||||||
newCache.set(mainFileId, filteredHistory);
|
newCache.set(mainFileId, filteredHistory);
|
||||||
}
|
}
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
import React, { createContext, useContext, useCallback, useRef } from 'react';
|
import React, { createContext, useContext, useCallback, useRef } from 'react';
|
||||||
import { fileStorage } from '../services/fileStorage';
|
import { fileStorage } from '../services/fileStorage';
|
||||||
import { FileId } from '../types/file';
|
import { FileId } from '../types/file';
|
||||||
import { StirlingFileStub, createStirlingFile } from '../types/fileContext';
|
import { StirlingFileStub, createStirlingFile, createQuickKey } from '../types/fileContext';
|
||||||
import { generateThumbnailForFile } from '../utils/thumbnailUtils';
|
import { generateThumbnailForFile } from '../utils/thumbnailUtils';
|
||||||
|
|
||||||
const DEBUG = process.env.NODE_ENV === 'development';
|
const DEBUG = process.env.NODE_ENV === 'development';
|
||||||
@ -72,7 +72,7 @@ export function IndexedDBProvider({ children }: IndexedDBProviderProps) {
|
|||||||
size: file.size,
|
size: file.size,
|
||||||
type: file.type,
|
type: file.type,
|
||||||
lastModified: file.lastModified,
|
lastModified: file.lastModified,
|
||||||
quickKey: `${file.name}|${file.size}|${file.lastModified}`,
|
quickKey: createQuickKey(file),
|
||||||
thumbnailUrl: thumbnail,
|
thumbnailUrl: thumbnail,
|
||||||
isLeaf: true,
|
isLeaf: true,
|
||||||
createdAt: Date.now(),
|
createdAt: Date.now(),
|
||||||
|
@ -49,8 +49,11 @@ import ChangePermissionsSettings from "../components/tools/changePermissions/Cha
|
|||||||
import FlattenSettings from "../components/tools/flatten/FlattenSettings";
|
import FlattenSettings from "../components/tools/flatten/FlattenSettings";
|
||||||
import RedactSingleStepSettings from "../components/tools/redact/RedactSingleStepSettings";
|
import RedactSingleStepSettings from "../components/tools/redact/RedactSingleStepSettings";
|
||||||
import Redact from "../tools/Redact";
|
import Redact from "../tools/Redact";
|
||||||
|
import AdjustPageScale from "../tools/AdjustPageScale";
|
||||||
import { ToolId } from "../types/toolId";
|
import { ToolId } from "../types/toolId";
|
||||||
import MergeSettings from '../components/tools/merge/MergeSettings';
|
import MergeSettings from '../components/tools/merge/MergeSettings';
|
||||||
|
import { adjustPageScaleOperationConfig } from "../hooks/tools/adjustPageScale/useAdjustPageScaleOperation";
|
||||||
|
import AdjustPageScaleSettings from "../components/tools/adjustPageScale/AdjustPageScaleSettings";
|
||||||
|
|
||||||
const showPlaceholderTools = true; // Show all tools; grey out unavailable ones in UI
|
const showPlaceholderTools = true; // Show all tools; grey out unavailable ones in UI
|
||||||
|
|
||||||
@ -337,11 +340,14 @@ export function useFlatToolRegistry(): ToolRegistry {
|
|||||||
"adjust-page-size-scale": {
|
"adjust-page-size-scale": {
|
||||||
icon: <LocalIcon icon="crop-free-rounded" width="1.5rem" height="1.5rem" />,
|
icon: <LocalIcon icon="crop-free-rounded" width="1.5rem" height="1.5rem" />,
|
||||||
name: t("home.scalePages.title", "Adjust page size/scale"),
|
name: t("home.scalePages.title", "Adjust page size/scale"),
|
||||||
component: null,
|
component: AdjustPageScale,
|
||||||
|
|
||||||
description: t("home.scalePages.desc", "Change the size/scale of a page and/or its contents."),
|
description: t("home.scalePages.desc", "Change the size/scale of a page and/or its contents."),
|
||||||
categoryId: ToolCategoryId.STANDARD_TOOLS,
|
categoryId: ToolCategoryId.STANDARD_TOOLS,
|
||||||
subcategoryId: SubcategoryId.PAGE_FORMATTING,
|
subcategoryId: SubcategoryId.PAGE_FORMATTING,
|
||||||
|
maxFiles: -1,
|
||||||
|
endpoints: ["scale-pages"],
|
||||||
|
operationConfig: adjustPageScaleOperationConfig,
|
||||||
|
settingsComponent: AdjustPageScaleSettings,
|
||||||
},
|
},
|
||||||
addPageNumbers: {
|
addPageNumbers: {
|
||||||
icon: <LocalIcon icon="123-rounded" width="1.5rem" height="1.5rem" />,
|
icon: <LocalIcon icon="123-rounded" width="1.5rem" height="1.5rem" />,
|
||||||
|
@ -119,7 +119,6 @@ describe('useAddPasswordOperation', () => {
|
|||||||
test.each([
|
test.each([
|
||||||
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
||||||
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/add-password' },
|
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/add-password' },
|
||||||
{ property: 'filePrefix' as const, expectedValue: undefined },
|
|
||||||
{ property: 'operationType' as const, expectedValue: 'addPassword' }
|
{ property: 'operationType' as const, expectedValue: 'addPassword' }
|
||||||
])('should configure $property correctly', ({ property, expectedValue }) => {
|
])('should configure $property correctly', ({ property, expectedValue }) => {
|
||||||
renderHook(() => useAddPasswordOperation());
|
renderHook(() => useAddPasswordOperation());
|
||||||
|
@ -0,0 +1,29 @@
|
|||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { useToolOperation, ToolType } from '../shared/useToolOperation';
|
||||||
|
import { createStandardErrorHandler } from '../../../utils/toolErrorHandler';
|
||||||
|
import { AdjustPageScaleParameters, defaultParameters } from './useAdjustPageScaleParameters';
|
||||||
|
|
||||||
|
export const buildAdjustPageScaleFormData = (parameters: AdjustPageScaleParameters, file: File): FormData => {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append("fileInput", file);
|
||||||
|
formData.append("scaleFactor", parameters.scaleFactor.toString());
|
||||||
|
formData.append("pageSize", parameters.pageSize);
|
||||||
|
return formData;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const adjustPageScaleOperationConfig = {
|
||||||
|
toolType: ToolType.singleFile,
|
||||||
|
buildFormData: buildAdjustPageScaleFormData,
|
||||||
|
operationType: 'adjustPageScale',
|
||||||
|
endpoint: '/api/v1/general/scale-pages',
|
||||||
|
defaultParameters,
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
export const useAdjustPageScaleOperation = () => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
|
return useToolOperation<AdjustPageScaleParameters>({
|
||||||
|
...adjustPageScaleOperationConfig,
|
||||||
|
getErrorMessage: createStandardErrorHandler(t('adjustPageScale.error.failed', 'An error occurred while adjusting the page scale.'))
|
||||||
|
});
|
||||||
|
};
|
@ -0,0 +1,142 @@
|
|||||||
|
import { describe, expect, test } from 'vitest';
|
||||||
|
import { renderHook, act } from '@testing-library/react';
|
||||||
|
import { useAdjustPageScaleParameters, defaultParameters, PageSize, AdjustPageScaleParametersHook } from './useAdjustPageScaleParameters';
|
||||||
|
|
||||||
|
describe('useAdjustPageScaleParameters', () => {
|
||||||
|
test('should initialize with default parameters', () => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
expect(result.current.parameters).toStrictEqual(defaultParameters);
|
||||||
|
expect(result.current.parameters.scaleFactor).toBe(1.0);
|
||||||
|
expect(result.current.parameters.pageSize).toBe(PageSize.KEEP);
|
||||||
|
});
|
||||||
|
|
||||||
|
test.each([
|
||||||
|
{ paramName: 'scaleFactor' as const, value: 0.5 },
|
||||||
|
{ paramName: 'scaleFactor' as const, value: 2.0 },
|
||||||
|
{ paramName: 'scaleFactor' as const, value: 10.0 },
|
||||||
|
{ paramName: 'pageSize' as const, value: PageSize.A4 },
|
||||||
|
{ paramName: 'pageSize' as const, value: PageSize.LETTER },
|
||||||
|
{ paramName: 'pageSize' as const, value: PageSize.LEGAL },
|
||||||
|
])('should update parameter $paramName to $value', ({ paramName, value }) => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter(paramName, value);
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.parameters[paramName]).toBe(value);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should reset parameters to defaults', () => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
// First, change some parameters
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter('scaleFactor', 2.5);
|
||||||
|
result.current.updateParameter('pageSize', PageSize.A3);
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.parameters.scaleFactor).toBe(2.5);
|
||||||
|
expect(result.current.parameters.pageSize).toBe(PageSize.A3);
|
||||||
|
|
||||||
|
// Then reset
|
||||||
|
act(() => {
|
||||||
|
result.current.resetParameters();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.parameters).toStrictEqual(defaultParameters);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should return correct endpoint name', () => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
expect(result.current.getEndpointName()).toBe('scale-pages');
|
||||||
|
});
|
||||||
|
|
||||||
|
test.each([
|
||||||
|
{
|
||||||
|
description: 'with default parameters',
|
||||||
|
setup: () => {},
|
||||||
|
expected: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: 'with valid scale factor 0.1',
|
||||||
|
setup: (hook: AdjustPageScaleParametersHook) => {
|
||||||
|
hook.updateParameter('scaleFactor', 0.1);
|
||||||
|
},
|
||||||
|
expected: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: 'with valid scale factor 10.0',
|
||||||
|
setup: (hook: AdjustPageScaleParametersHook) => {
|
||||||
|
hook.updateParameter('scaleFactor', 10.0);
|
||||||
|
},
|
||||||
|
expected: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: 'with A4 page size',
|
||||||
|
setup: (hook: AdjustPageScaleParametersHook) => {
|
||||||
|
hook.updateParameter('pageSize', PageSize.A4);
|
||||||
|
},
|
||||||
|
expected: true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: 'with invalid scale factor 0',
|
||||||
|
setup: (hook: AdjustPageScaleParametersHook) => {
|
||||||
|
hook.updateParameter('scaleFactor', 0);
|
||||||
|
},
|
||||||
|
expected: false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
description: 'with negative scale factor',
|
||||||
|
setup: (hook: AdjustPageScaleParametersHook) => {
|
||||||
|
hook.updateParameter('scaleFactor', -0.5);
|
||||||
|
},
|
||||||
|
expected: false
|
||||||
|
}
|
||||||
|
])('should validate parameters correctly $description', ({ setup, expected }) => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
act(() => {
|
||||||
|
setup(result.current);
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.validateParameters()).toBe(expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should handle all PageSize enum values', () => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
Object.values(PageSize).forEach(pageSize => {
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter('pageSize', pageSize);
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.parameters.pageSize).toBe(pageSize);
|
||||||
|
expect(result.current.validateParameters()).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('should handle scale factor edge cases', () => {
|
||||||
|
const { result } = renderHook(() => useAdjustPageScaleParameters());
|
||||||
|
|
||||||
|
// Test very small valid scale factor
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter('scaleFactor', 0.01);
|
||||||
|
});
|
||||||
|
expect(result.current.validateParameters()).toBe(true);
|
||||||
|
|
||||||
|
// Test scale factor just above zero
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter('scaleFactor', 0.001);
|
||||||
|
});
|
||||||
|
expect(result.current.validateParameters()).toBe(true);
|
||||||
|
|
||||||
|
// Test exactly zero (invalid)
|
||||||
|
act(() => {
|
||||||
|
result.current.updateParameter('scaleFactor', 0);
|
||||||
|
});
|
||||||
|
expect(result.current.validateParameters()).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
@ -0,0 +1,37 @@
|
|||||||
|
import { BaseParameters } from '../../../types/parameters';
|
||||||
|
import { useBaseParameters, BaseParametersHook } from '../shared/useBaseParameters';
|
||||||
|
|
||||||
|
export enum PageSize {
|
||||||
|
KEEP = 'KEEP',
|
||||||
|
A0 = 'A0',
|
||||||
|
A1 = 'A1',
|
||||||
|
A2 = 'A2',
|
||||||
|
A3 = 'A3',
|
||||||
|
A4 = 'A4',
|
||||||
|
A5 = 'A5',
|
||||||
|
A6 = 'A6',
|
||||||
|
LETTER = 'LETTER',
|
||||||
|
LEGAL = 'LEGAL'
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AdjustPageScaleParameters extends BaseParameters {
|
||||||
|
scaleFactor: number;
|
||||||
|
pageSize: PageSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const defaultParameters: AdjustPageScaleParameters = {
|
||||||
|
scaleFactor: 1.0,
|
||||||
|
pageSize: PageSize.KEEP,
|
||||||
|
};
|
||||||
|
|
||||||
|
export type AdjustPageScaleParametersHook = BaseParametersHook<AdjustPageScaleParameters>;
|
||||||
|
|
||||||
|
export const useAdjustPageScaleParameters = (): AdjustPageScaleParametersHook => {
|
||||||
|
return useBaseParameters({
|
||||||
|
defaultParameters,
|
||||||
|
endpointName: 'scale-pages',
|
||||||
|
validateFn: (params) => {
|
||||||
|
return params.scaleFactor > 0;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
@ -28,7 +28,6 @@ export const autoRenameOperationConfig = {
|
|||||||
buildFormData: buildAutoRenameFormData,
|
buildFormData: buildAutoRenameFormData,
|
||||||
operationType: 'autoRename',
|
operationType: 'autoRename',
|
||||||
endpoint: '/api/v1/misc/auto-rename',
|
endpoint: '/api/v1/misc/auto-rename',
|
||||||
filePrefix: 'autoRename_',
|
|
||||||
preserveBackendFilename: true, // Use filename from backend response headers
|
preserveBackendFilename: true, // Use filename from backend response headers
|
||||||
defaultParameters,
|
defaultParameters,
|
||||||
} as const;
|
} as const;
|
||||||
|
@ -113,7 +113,6 @@ describe('useChangePermissionsOperation', () => {
|
|||||||
test.each([
|
test.each([
|
||||||
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
||||||
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/add-password' },
|
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/add-password' },
|
||||||
{ property: 'filePrefix' as const, expectedValue: undefined },
|
|
||||||
{ property: 'operationType' as const, expectedValue: 'change-permissions' }
|
{ property: 'operationType' as const, expectedValue: 'change-permissions' }
|
||||||
])('should configure $property correctly', ({ property, expectedValue }) => {
|
])('should configure $property correctly', ({ property, expectedValue }) => {
|
||||||
renderHook(() => useChangePermissionsOperation());
|
renderHook(() => useChangePermissionsOperation());
|
||||||
|
@ -17,7 +17,6 @@ export const flattenOperationConfig = {
|
|||||||
buildFormData: buildFlattenFormData,
|
buildFormData: buildFlattenFormData,
|
||||||
operationType: 'flatten',
|
operationType: 'flatten',
|
||||||
endpoint: '/api/v1/misc/flatten',
|
endpoint: '/api/v1/misc/flatten',
|
||||||
filePrefix: 'flattened_', // Will be overridden in hook with translation
|
|
||||||
multiFileEndpoint: false,
|
multiFileEndpoint: false,
|
||||||
defaultParameters,
|
defaultParameters,
|
||||||
} as const;
|
} as const;
|
||||||
@ -27,7 +26,6 @@ export const useFlattenOperation = () => {
|
|||||||
|
|
||||||
return useToolOperation<FlattenParameters>({
|
return useToolOperation<FlattenParameters>({
|
||||||
...flattenOperationConfig,
|
...flattenOperationConfig,
|
||||||
filePrefix: t('flatten.filenamePrefix', 'flattened') + '_',
|
|
||||||
getErrorMessage: createStandardErrorHandler(t('flatten.error.failed', 'An error occurred while flattening the PDF.'))
|
getErrorMessage: createStandardErrorHandler(t('flatten.error.failed', 'An error occurred while flattening the PDF.'))
|
||||||
});
|
});
|
||||||
};
|
};
|
@ -37,7 +37,6 @@ export const redactOperationConfig = {
|
|||||||
throw new Error('Manual redaction not yet implemented');
|
throw new Error('Manual redaction not yet implemented');
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
filePrefix: 'redacted_',
|
|
||||||
defaultParameters,
|
defaultParameters,
|
||||||
} as const;
|
} as const;
|
||||||
|
|
||||||
|
@ -97,7 +97,6 @@ describe('useRemovePasswordOperation', () => {
|
|||||||
test.each([
|
test.each([
|
||||||
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
{ property: 'toolType' as const, expectedValue: ToolType.singleFile },
|
||||||
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/remove-password' },
|
{ property: 'endpoint' as const, expectedValue: '/api/v1/security/remove-password' },
|
||||||
{ property: 'filePrefix' as const, expectedValue: undefined },
|
|
||||||
{ property: 'operationType' as const, expectedValue: 'removePassword' }
|
{ property: 'operationType' as const, expectedValue: 'removePassword' }
|
||||||
])('should configure $property correctly', ({ property, expectedValue }) => {
|
])('should configure $property correctly', ({ property, expectedValue }) => {
|
||||||
renderHook(() => useRemovePasswordOperation());
|
renderHook(() => useRemovePasswordOperation());
|
||||||
|
@ -1,157 +0,0 @@
|
|||||||
/**
|
|
||||||
* Custom hook for on-demand file history loading
|
|
||||||
* Replaces automatic history extraction during file loading
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { useState, useCallback } from 'react';
|
|
||||||
import { FileId } from '../types/file';
|
|
||||||
import { StirlingFileStub } from '../types/fileContext';
|
|
||||||
// loadFileHistoryOnDemand removed - history now comes from IndexedDB directly
|
|
||||||
|
|
||||||
interface FileHistoryState {
|
|
||||||
originalFileId?: string;
|
|
||||||
versionNumber?: number;
|
|
||||||
parentFileId?: FileId;
|
|
||||||
toolHistory?: Array<{
|
|
||||||
toolName: string;
|
|
||||||
timestamp: number;
|
|
||||||
parameters?: Record<string, any>;
|
|
||||||
}>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface UseFileHistoryResult {
|
|
||||||
historyData: FileHistoryState | null;
|
|
||||||
isLoading: boolean;
|
|
||||||
error: string | null;
|
|
||||||
loadHistory: (file: File, fileId: FileId, updateFileStub?: (id: FileId, updates: Partial<StirlingFileStub>) => void) => Promise<void>;
|
|
||||||
clearHistory: () => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function useFileHistory(): UseFileHistoryResult {
|
|
||||||
const [historyData, setHistoryData] = useState<FileHistoryState | null>(null);
|
|
||||||
const [isLoading, setIsLoading] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
const loadHistory = useCallback(async (
|
|
||||||
_file: File,
|
|
||||||
_fileId: FileId,
|
|
||||||
_updateFileStub?: (id: FileId, updates: Partial<StirlingFileStub>) => void
|
|
||||||
) => {
|
|
||||||
setIsLoading(true);
|
|
||||||
setError(null);
|
|
||||||
|
|
||||||
try {
|
|
||||||
// History is now loaded from IndexedDB, not PDF metadata
|
|
||||||
// This function is deprecated
|
|
||||||
throw new Error('loadFileHistoryOnDemand is deprecated - use IndexedDB history directly');
|
|
||||||
} catch (err) {
|
|
||||||
const errorMessage = err instanceof Error ? err.message : 'Failed to load file history';
|
|
||||||
setError(errorMessage);
|
|
||||||
setHistoryData(null);
|
|
||||||
} finally {
|
|
||||||
setIsLoading(false);
|
|
||||||
}
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const clearHistory = useCallback(() => {
|
|
||||||
setHistoryData(null);
|
|
||||||
setError(null);
|
|
||||||
setIsLoading(false);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
return {
|
|
||||||
historyData,
|
|
||||||
isLoading,
|
|
||||||
error,
|
|
||||||
loadHistory,
|
|
||||||
clearHistory
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Hook for managing history state of multiple files
|
|
||||||
*/
|
|
||||||
export function useMultiFileHistory() {
|
|
||||||
const [historyCache, setHistoryCache] = useState<Map<FileId, FileHistoryState>>(new Map());
|
|
||||||
const [loadingFiles, setLoadingFiles] = useState<Set<FileId>>(new Set());
|
|
||||||
const [errors, setErrors] = useState<Map<FileId, string>>(new Map());
|
|
||||||
|
|
||||||
const loadFileHistory = useCallback(async (
|
|
||||||
_file: File,
|
|
||||||
fileId: FileId,
|
|
||||||
_updateFileStub?: (id: FileId, updates: Partial<StirlingFileStub>) => void
|
|
||||||
) => {
|
|
||||||
// Don't reload if already loaded or currently loading
|
|
||||||
if (historyCache.has(fileId) || loadingFiles.has(fileId)) {
|
|
||||||
return historyCache.get(fileId) || null;
|
|
||||||
}
|
|
||||||
|
|
||||||
setLoadingFiles(prev => new Set(prev).add(fileId));
|
|
||||||
setErrors(prev => {
|
|
||||||
const newErrors = new Map(prev);
|
|
||||||
newErrors.delete(fileId);
|
|
||||||
return newErrors;
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
// History is now loaded from IndexedDB, not PDF metadata
|
|
||||||
// This function is deprecated
|
|
||||||
throw new Error('loadFileHistoryOnDemand is deprecated - use IndexedDB history directly');
|
|
||||||
} catch (err) {
|
|
||||||
const errorMessage = err instanceof Error ? err.message : 'Failed to load file history';
|
|
||||||
setErrors(prev => new Map(prev).set(fileId, errorMessage));
|
|
||||||
return null;
|
|
||||||
} finally {
|
|
||||||
setLoadingFiles(prev => {
|
|
||||||
const newSet = new Set(prev);
|
|
||||||
newSet.delete(fileId);
|
|
||||||
return newSet;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, [historyCache, loadingFiles]);
|
|
||||||
|
|
||||||
const getHistory = useCallback((fileId: FileId) => {
|
|
||||||
return historyCache.get(fileId) || null;
|
|
||||||
}, [historyCache]);
|
|
||||||
|
|
||||||
const isLoadingHistory = useCallback((fileId: FileId) => {
|
|
||||||
return loadingFiles.has(fileId);
|
|
||||||
}, [loadingFiles]);
|
|
||||||
|
|
||||||
const getError = useCallback((fileId: FileId) => {
|
|
||||||
return errors.get(fileId) || null;
|
|
||||||
}, [errors]);
|
|
||||||
|
|
||||||
const clearHistory = useCallback((fileId: FileId) => {
|
|
||||||
setHistoryCache(prev => {
|
|
||||||
const newCache = new Map(prev);
|
|
||||||
newCache.delete(fileId);
|
|
||||||
return newCache;
|
|
||||||
});
|
|
||||||
setErrors(prev => {
|
|
||||||
const newErrors = new Map(prev);
|
|
||||||
newErrors.delete(fileId);
|
|
||||||
return newErrors;
|
|
||||||
});
|
|
||||||
setLoadingFiles(prev => {
|
|
||||||
const newSet = new Set(prev);
|
|
||||||
newSet.delete(fileId);
|
|
||||||
return newSet;
|
|
||||||
});
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const clearAllHistory = useCallback(() => {
|
|
||||||
setHistoryCache(new Map());
|
|
||||||
setLoadingFiles(new Set());
|
|
||||||
setErrors(new Map());
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
return {
|
|
||||||
loadFileHistory,
|
|
||||||
getHistory,
|
|
||||||
isLoadingHistory,
|
|
||||||
getError,
|
|
||||||
clearHistory,
|
|
||||||
clearAllHistory
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,450 +0,0 @@
|
|||||||
/**
|
|
||||||
* PDF Metadata Service - File History Tracking with pdf-lib
|
|
||||||
*
|
|
||||||
* Handles injection and extraction of file history metadata in PDFs using pdf-lib.
|
|
||||||
* This service embeds file history directly into PDF metadata, making it persistent
|
|
||||||
* across all tool operations and downloads.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { PDFDocument } from 'pdf-lib';
|
|
||||||
import { ContentCache, type CacheConfig } from '../utils/ContentCache';
|
|
||||||
|
|
||||||
const DEBUG = process.env.NODE_ENV === 'development';
|
|
||||||
|
|
||||||
/**
 * Tool operation metadata for history tracking.
 * Note: Parameters removed for security - sensitive data like passwords should not be stored.
 */
export interface ToolOperation {
  // Identifier of the tool that was applied (e.g. a backend operation name).
  toolName: string;
  // Epoch-milliseconds timestamp of when the operation ran.
  timestamp: number;
}

/**
 * Complete file history metadata structure.
 * Uses standard PDF metadata fields (Creator, Producer, CreationDate, ModificationDate)
 * and embeds Stirling-specific history in keywords.
 */
export interface PDFHistoryMetadata {
  stirlingHistory: {
    // Id of the first ancestor file in this edit chain.
    originalFileId: string;
    // Immediate predecessor file; absent for an original upload.
    parentFileId?: string;
    // Monotonically increasing version counter for the chain.
    versionNumber: number;
    // Ordered list of tool operations applied so far.
    toolChain: ToolOperation[];
    // Schema version of this embedded structure; validated on extraction.
    formatVersion: '1.0';
  };
}
|
|
||||||
|
|
||||||
/**
 * Service for managing PDF file history metadata.
 *
 * Embeds a JSON history blob into the PDF Keywords field (prefixed with
 * `stirling-history:`) and reads it back, caching extraction results per
 * file content. All public methods are fail-soft: on error they return the
 * original bytes / null rather than throwing.
 */
export class PDFMetadataService {
  // Keyword prefix used to tag the embedded history JSON.
  private static readonly HISTORY_KEYWORD = 'stirling-history';
  // Schema version written into and validated against embedded metadata.
  private static readonly FORMAT_VERSION = '1.0';

  // Caches extraction results keyed by file content fingerprint.
  // NOTE(review): because ContentCache.get returns null for a miss, a cached
  // `null` result is indistinguishable from a miss, so PDFs without history
  // are re-parsed on every call — confirm whether that is acceptable.
  private metadataCache: ContentCache<PDFHistoryMetadata | null>;

  /**
   * @param cacheConfig Optional overrides for the extraction cache
   *                    (defaults: 5 minute TTL, 100 entries).
   */
  constructor(cacheConfig?: Partial<CacheConfig>) {
    const defaultConfig: CacheConfig = {
      ttl: 5 * 60 * 1000, // 5 minutes
      maxSize: 100, // 100 files
      enableWarnings: DEBUG
    };

    this.metadataCache = new ContentCache<PDFHistoryMetadata | null>({
      ...defaultConfig,
      ...cacheConfig
    });
  }

  /**
   * Inject file history metadata into a PDF.
   *
   * Rewrites the Keywords field so exactly one history entry remains, and
   * stamps Creator/Producer as 'Stirling-PDF'. Returns the re-saved PDF, or
   * the original bytes unchanged if anything fails.
   */
  async injectHistoryMetadata(
    pdfBytes: ArrayBuffer,
    originalFileId: string,
    parentFileId?: string,
    toolChain: ToolOperation[] = [],
    versionNumber: number = 1
  ): Promise<ArrayBuffer> {
    try {
      const pdfDoc = await PDFDocument.load(pdfBytes, { ignoreEncryption: true });

      const historyMetadata: PDFHistoryMetadata = {
        stirlingHistory: {
          originalFileId,
          parentFileId,
          versionNumber,
          toolChain: [...toolChain], // defensive copy; caller keeps its array
          formatVersion: PDFMetadataService.FORMAT_VERSION
        }
      };

      // Set Stirling-PDF identification fields only (don't touch dates)
      pdfDoc.setCreator('Stirling-PDF');
      pdfDoc.setProducer('Stirling-PDF');

      // Embed history metadata in keywords field (most compatible)
      const historyJson = JSON.stringify(historyMetadata);
      const existingKeywords = pdfDoc.getKeywords();

      // Handle keywords as array (pdf-lib stores them as array)
      let keywordList: string[] = [];
      if (Array.isArray(existingKeywords)) {
        // Remove any existing history keywords to avoid duplicates
        keywordList = existingKeywords.filter(keyword =>
          !keyword.startsWith(`${PDFMetadataService.HISTORY_KEYWORD}:`)
        );
      } else if (existingKeywords) {
        // Remove history from single keyword string
        const cleanKeyword = this.extractHistoryFromKeywords(existingKeywords, true);
        if (cleanKeyword) {
          keywordList = [cleanKeyword];
        }
      }

      // Add our new history metadata as a keyword (replacing any previous history)
      const historyKeyword = `${PDFMetadataService.HISTORY_KEYWORD}:${historyJson}`;
      keywordList.push(historyKeyword);

      pdfDoc.setKeywords(keywordList);

      if (DEBUG) {
        console.log('📄 Injected PDF history metadata:', {
          originalFileId,
          parentFileId,
          versionNumber,
          toolCount: toolChain.length
        });
      }

      const savedPdfBytes = await pdfDoc.save();
      // Convert Uint8Array to ArrayBuffer (pdf-lib returns a Uint8Array)
      const arrayBuffer = new ArrayBuffer(savedPdfBytes.byteLength);
      new Uint8Array(arrayBuffer).set(savedPdfBytes);
      return arrayBuffer;
    } catch (error) {
      if (DEBUG) console.error('📄 Failed to inject PDF metadata:', error);
      // Return original bytes if metadata injection fails
      return pdfBytes;
    }
  }

  /**
   * Extract file history metadata from a PDF.
   * Results are cached by content fingerprint; returns null when the PDF
   * carries no valid Stirling history.
   */
  async extractHistoryMetadata(pdfBytes: ArrayBuffer): Promise<PDFHistoryMetadata | null> {
    const cacheKey = this.metadataCache.generateKeyFromBuffer(pdfBytes);

    // Check cache first
    const cached = this.metadataCache.get(cacheKey);
    if (cached !== null) {
      return cached;
    }

    // Extract from PDF
    const metadata = await this.extractHistoryMetadataInternal(pdfBytes);

    // Cache the result
    this.metadataCache.set(cacheKey, metadata);

    return metadata;
  }

  /**
   * Internal method for actual PDF metadata extraction.
   * Scans the Keywords field for history entries; when several are present
   * (array form), the one with the highest versionNumber wins.
   */
  private async extractHistoryMetadataInternal(pdfBytes: ArrayBuffer): Promise<PDFHistoryMetadata | null> {
    try {
      const pdfDoc = await PDFDocument.load(pdfBytes, { ignoreEncryption: true });
      const keywords = pdfDoc.getKeywords();

      // Look for history keyword directly in array or convert to string
      let historyJson: string | null = null;

      if (Array.isArray(keywords)) {
        // Search through keywords array for our history keyword - get the LATEST one
        const historyKeywords = keywords.filter(keyword =>
          keyword.startsWith(`${PDFMetadataService.HISTORY_KEYWORD}:`)
        );

        if (historyKeywords.length > 0) {
          // If multiple history keywords exist, parse all and get the highest version number
          let latestVersionNumber = 0;

          for (const historyKeyword of historyKeywords) {
            try {
              const json = historyKeyword.substring(`${PDFMetadataService.HISTORY_KEYWORD}:`.length);
              const parsed = JSON.parse(json) as PDFHistoryMetadata;

              if (parsed.stirlingHistory.versionNumber > latestVersionNumber) {
                latestVersionNumber = parsed.stirlingHistory.versionNumber;
                historyJson = json;
              }
            } catch {
              // Silent fallback for corrupted history
            }
          }
        }
      } else if (keywords) {
        // Fallback to string parsing
        historyJson = this.extractHistoryFromKeywords(keywords);
      }

      if (!historyJson) return null;

      const metadata = JSON.parse(historyJson) as PDFHistoryMetadata;

      // Validate metadata structure
      if (!this.isValidHistoryMetadata(metadata)) {
        return null;
      }

      return metadata;
    } catch (error) {
      if (DEBUG) console.error('📄 Failed to extract PDF metadata:', error);
      return null;
    }
  }

  /**
   * Add a tool operation to existing PDF history.
   * No-op (returns the original bytes) when the PDF has no history yet —
   * use createNewVersion to start a chain.
   */
  async addToolOperation(
    pdfBytes: ArrayBuffer,
    toolOperation: ToolOperation
  ): Promise<ArrayBuffer> {
    try {
      // Extract existing history
      const existingHistory = await this.extractHistoryMetadata(pdfBytes);

      if (!existingHistory) {
        if (DEBUG) console.warn('📄 No existing history found, cannot add tool operation');
        return pdfBytes;
      }

      // Add new tool operation
      const updatedToolChain = [...existingHistory.stirlingHistory.toolChain, toolOperation];

      // Re-inject with updated history (version number is NOT incremented here)
      return await this.injectHistoryMetadata(
        pdfBytes,
        existingHistory.stirlingHistory.originalFileId,
        existingHistory.stirlingHistory.parentFileId,
        updatedToolChain,
        existingHistory.stirlingHistory.versionNumber
      );
    } catch (error) {
      if (DEBUG) console.error('📄 Failed to add tool operation:', error);
      return pdfBytes;
    }
  }

  /**
   * Create a new version of a PDF with incremented version number.
   * Works with or without existing history: missing history is treated as
   * version 0 with an empty tool chain, and `parentFileId` doubles as the
   * original id in that case.
   */
  async createNewVersion(
    pdfBytes: ArrayBuffer,
    parentFileId: string,
    toolOperation: ToolOperation
  ): Promise<ArrayBuffer> {
    try {
      const parentHistory = await this.extractHistoryMetadata(pdfBytes);

      const originalFileId = parentHistory?.stirlingHistory.originalFileId || parentFileId;
      const parentToolChain = parentHistory?.stirlingHistory.toolChain || [];
      const newVersionNumber = (parentHistory?.stirlingHistory.versionNumber || 0) + 1;

      // Create new tool chain with the new operation
      const newToolChain = [...parentToolChain, toolOperation];

      return await this.injectHistoryMetadata(
        pdfBytes,
        originalFileId,
        parentFileId,
        newToolChain,
        newVersionNumber
      );
    } catch (error) {
      if (DEBUG) console.error('📄 Failed to create new version:', error);
      return pdfBytes;
    }
  }

  /**
   * Extract standard PDF document metadata.
   * Returns undefined for fields the document does not set; returns null
   * when the document cannot be loaded at all.
   */
  async extractStandardMetadata(pdfBytes: ArrayBuffer): Promise<{
    title?: string;
    author?: string;
    subject?: string;
    creator?: string;
    producer?: string;
    creationDate?: Date;
    modificationDate?: Date;
  } | null> {
    try {
      const pdfDoc = await PDFDocument.load(pdfBytes, { ignoreEncryption: true });

      return {
        title: pdfDoc.getTitle() || undefined,
        author: pdfDoc.getAuthor() || undefined,
        subject: pdfDoc.getSubject() || undefined,
        creator: pdfDoc.getCreator() || undefined,
        producer: pdfDoc.getProducer() || undefined,
        creationDate: pdfDoc.getCreationDate() || undefined,
        modificationDate: pdfDoc.getModificationDate() || undefined
      };
    } catch (error) {
      if (DEBUG) console.warn('📄 Failed to extract standard PDF metadata:', error);
      return null;
    }
  }

  /**
   * Verify that tool preserved standard PDF metadata.
   * Diagnostic only: logs warnings for tools that strip or alter metadata,
   * never throws and never modifies either document.
   */
  async verifyMetadataPreservation(
    originalBytes: ArrayBuffer,
    processedBytes: ArrayBuffer,
    toolName: string
  ): Promise<void> {
    try {
      const [originalMetadata, processedMetadata] = await Promise.all([
        this.extractStandardMetadata(originalBytes),
        this.extractStandardMetadata(processedBytes)
      ]);

      if (!originalMetadata || !processedMetadata) return;

      // Check each metadata field for preservation
      const issues: string[] = [];

      if (originalMetadata.title && !processedMetadata.title) {
        issues.push('Title stripped');
      }
      if (originalMetadata.author && !processedMetadata.author) {
        issues.push('Author stripped');
      }
      if (originalMetadata.subject && !processedMetadata.subject) {
        issues.push('Subject stripped');
      }
      if (originalMetadata.creationDate && !processedMetadata.creationDate) {
        issues.push('CreationDate stripped');
      }
      // Allow up to 1s drift before flagging a modified CreationDate.
      if (originalMetadata.creationDate && processedMetadata.creationDate &&
          Math.abs(originalMetadata.creationDate.getTime() - processedMetadata.creationDate.getTime()) > 1000) {
        issues.push(`CreationDate modified (${originalMetadata.creationDate.toISOString()} → ${processedMetadata.creationDate.toISOString()})`);
      }

      // Note: We don't check ModificationDate preservation since we use File.lastModified as source of truth

      if (issues.length > 0) {
        console.warn(`⚠️ METADATA LOSS: Tool '${toolName}' did not preserve PDF metadata:`, issues.join(', '));
        console.warn(`⚠️ This backend tool should be updated to preserve standard PDF metadata fields.`);
      } else {
        console.log(`✅ METADATA PRESERVED: Tool '${toolName}' correctly preserved all PDF metadata`);
      }

    } catch (error) {
      if (DEBUG) console.warn(`📄 Failed to verify metadata preservation for ${toolName}:`, error);
    }
  }

  /**
   * Check if a PDF has Stirling history metadata.
   */
  async hasStirlingHistory(pdfBytes: ArrayBuffer): Promise<boolean> {
    const metadata = await this.extractHistoryMetadata(pdfBytes);
    return metadata !== null;
  }

  /**
   * Get version information from PDF.
   * Convenience projection of the embedded history; null when absent.
   */
  async getVersionInfo(pdfBytes: ArrayBuffer): Promise<{
    originalFileId: string;
    versionNumber: number;
    toolCount: number;
    parentFileId?: string;
  } | null> {
    const metadata = await this.extractHistoryMetadata(pdfBytes);
    if (!metadata) return null;

    return {
      originalFileId: metadata.stirlingHistory.originalFileId,
      versionNumber: metadata.stirlingHistory.versionNumber,
      toolCount: metadata.stirlingHistory.toolChain.length,
      parentFileId: metadata.stirlingHistory.parentFileId
    };
  }

  /**
   * Embed history JSON in keywords field with delimiter.
   * NOTE(review): appears unused by the methods in this class (injection
   * goes through the array path) — confirm before removing.
   */
  private embedHistoryInKeywords(existingKeywords: string, historyJson: string): string {
    // Remove any existing history
    const cleanKeywords = this.extractHistoryFromKeywords(existingKeywords, true) || existingKeywords;

    // Add new history with delimiter
    const historyKeyword = `${PDFMetadataService.HISTORY_KEYWORD}:${historyJson}`;

    if (cleanKeywords.trim()) {
      return `${cleanKeywords.trim()} ${historyKeyword}`;
    }
    return historyKeyword;
  }

  /**
   * Extract history JSON from keywords field.
   *
   * @param keywords        Raw keywords string possibly containing a
   *                        `stirling-history:{...}` segment.
   * @param returnRemainder When true, return the keywords with the history
   *                        segment removed instead of the history itself.
   * @returns The history JSON (or remainder), or null when no history
   *          prefix is found.
   */
  private extractHistoryFromKeywords(keywords: string, returnRemainder = false): string | null {
    const historyPrefix = `${PDFMetadataService.HISTORY_KEYWORD}:`;
    const historyIndex = keywords.indexOf(historyPrefix);

    if (historyIndex === -1) return null;

    const historyStart = historyIndex + historyPrefix.length;
    let historyEnd = keywords.length;

    // Look for the next keyword (space followed by non-JSON content)
    // Simple heuristic: find space followed by word that doesn't look like JSON
    const afterHistory = keywords.substring(historyStart);
    const nextSpaceIndex = afterHistory.indexOf(' ');
    if (nextSpaceIndex > 0) {
      const afterSpace = afterHistory.substring(nextSpaceIndex + 1);
      if (afterSpace && !afterSpace.trim().startsWith('{')) {
        historyEnd = historyStart + nextSpaceIndex;
      }
    }

    if (returnRemainder) {
      // Return keywords with history removed
      const before = keywords.substring(0, historyIndex);
      const after = keywords.substring(historyEnd);
      return `${before}${after}`.replace(/\s+/g, ' ').trim();
    }

    return keywords.substring(historyStart, historyEnd).trim();
  }

  /**
   * Validate metadata structure.
   * Shallow shape check plus an exact formatVersion match; does not
   * validate individual toolChain entries.
   */
  private isValidHistoryMetadata(metadata: any): metadata is PDFHistoryMetadata {
    return metadata &&
           metadata.stirlingHistory &&
           typeof metadata.stirlingHistory.originalFileId === 'string' &&
           typeof metadata.stirlingHistory.versionNumber === 'number' &&
           Array.isArray(metadata.stirlingHistory.toolChain) &&
           metadata.stirlingHistory.formatVersion === PDFMetadataService.FORMAT_VERSION;
  }
}

// Export singleton instance with optimized cache settings
export const pdfMetadataService = new PDFMetadataService({
  ttl: 10 * 60 * 1000, // 10 minutes for PDF metadata (longer than default)
  maxSize: 50, // Smaller cache for memory efficiency
  enableWarnings: DEBUG
});
|
|
58
frontend/src/tools/AdjustPageScale.tsx
Normal file
58
frontend/src/tools/AdjustPageScale.tsx
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
import { useTranslation } from "react-i18next";
|
||||||
|
import { createToolFlow } from "../components/tools/shared/createToolFlow";
|
||||||
|
import AdjustPageScaleSettings from "../components/tools/adjustPageScale/AdjustPageScaleSettings";
|
||||||
|
import { useAdjustPageScaleParameters } from "../hooks/tools/adjustPageScale/useAdjustPageScaleParameters";
|
||||||
|
import { useAdjustPageScaleOperation } from "../hooks/tools/adjustPageScale/useAdjustPageScaleOperation";
|
||||||
|
import { useBaseTool } from "../hooks/tools/shared/useBaseTool";
|
||||||
|
import { BaseToolProps, ToolComponent } from "../types/tool";
|
||||||
|
import { useAdjustPageScaleTips } from "../components/tooltips/useAdjustPageScaleTips";
|
||||||
|
|
||||||
|
/**
 * Adjust Page Scale tool.
 *
 * Thin composition over useBaseTool + createToolFlow: renders the settings
 * step, the execute button, and the review panel once results exist.
 */
const AdjustPageScale = (props: BaseToolProps) => {
  const { t } = useTranslation();
  const adjustPageScaleTips = useAdjustPageScaleTips();

  const base = useBaseTool(
    'adjustPageScale',
    useAdjustPageScaleParameters,
    useAdjustPageScaleOperation,
    props
  );

  // Single settings step; collapsing it exposes a reset affordance.
  const settingsStep = {
    title: "Settings",
    isCollapsed: base.settingsCollapsed,
    onCollapsedClick: base.settingsCollapsed ? base.handleSettingsReset : undefined,
    tooltip: adjustPageScaleTips,
    content: (
      <AdjustPageScaleSettings
        parameters={base.params.parameters}
        onParameterChange={base.params.updateParameter}
        disabled={base.endpointLoading}
      />
    ),
  };

  const executeButton = {
    text: t("adjustPageScale.submit", "Adjust Page Scale"),
    isVisible: !base.hasResults,
    loadingText: t("loading"),
    onClick: base.handleExecute,
    // Requires valid params, at least one file, and an enabled endpoint.
    disabled: !base.params.validateParameters() || !base.hasFiles || !base.endpointEnabled,
  };

  const review = {
    isVisible: base.hasResults,
    operation: base.operation,
    title: t("adjustPageScale.title", "Page Scale Results"),
    onFileClick: base.handleThumbnailClick,
    onUndo: base.handleUndo,
  };

  return createToolFlow({
    files: {
      selectedFiles: base.selectedFiles,
      isCollapsed: base.hasResults,
    },
    steps: [settingsStep],
    executeButton,
    review,
  });
};

export default AdjustPageScale as ToolComponent;
|
@ -15,17 +15,6 @@ export interface ToolOperation {
|
|||||||
timestamp: number;
|
timestamp: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * File history information extracted from PDF metadata
 * Timestamps come from standard PDF metadata fields (CreationDate, ModificationDate)
 */
export interface FileHistoryInfo {
  // Id of the first ancestor file in the edit chain.
  originalFileId: string;
  // Immediate predecessor file; absent for an original upload.
  parentFileId?: FileId;
  // Monotonically increasing version counter for the chain.
  versionNumber: number;
  // Ordered list of tool operations applied so far.
  toolChain: ToolOperation[];
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Base file metadata shared between storage and runtime layers
|
* Base file metadata shared between storage and runtime layers
|
||||||
* Contains all common file properties and history tracking
|
* Contains all common file properties and history tracking
|
||||||
@ -59,47 +48,3 @@ export interface BaseFileMetadata {
|
|||||||
modificationDate?: Date;
|
modificationDate?: Date;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// FileMetadata has been replaced with StoredFileMetadata from '../services/fileStorage'
|
|
||||||
// This ensures clear type relationships and eliminates duplication
|
|
||||||
|
|
||||||
|
|
||||||
/** Configuration for client-side file storage limits and backend choice. */
export interface StorageConfig {
  // Whether to persist files in IndexedDB (vs. memory-only).
  useIndexedDB: boolean;
  maxFileSize: number; // Maximum size per file in bytes
  maxTotalStorage: number; // Maximum total storage in bytes
  warningThreshold: number; // Warning threshold (percentage 0-1)
}

// Baseline limits; maxTotalStorage is refined at runtime by
// initializeStorageConfig using the browser's storage estimate.
export const defaultStorageConfig: StorageConfig = {
  useIndexedDB: true,
  maxFileSize: 100 * 1024 * 1024, // 100MB per file
  maxTotalStorage: 1024 * 1024 * 1024, // 1GB default, will be updated dynamically
  warningThreshold: 0.8, // Warn at 80% capacity
};
|
|
||||||
|
|
||||||
// Calculate and update storage limit: half of available storage or 10GB, whichever is smaller
|
|
||||||
export const initializeStorageConfig = async (): Promise<StorageConfig> => {
|
|
||||||
const tenGB = 10 * 1024 * 1024 * 1024; // 10GB in bytes
|
|
||||||
const oneGB = 1024 * 1024 * 1024; // 1GB fallback
|
|
||||||
|
|
||||||
let maxTotalStorage = oneGB; // Default fallback
|
|
||||||
|
|
||||||
// Try to estimate available storage
|
|
||||||
if ('storage' in navigator && 'estimate' in navigator.storage) {
|
|
||||||
try {
|
|
||||||
const estimate = await navigator.storage.estimate();
|
|
||||||
if (estimate.quota) {
|
|
||||||
const halfQuota = estimate.quota / 2;
|
|
||||||
maxTotalStorage = Math.min(halfQuota, tenGB);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.warn('Could not estimate storage quota, using 1GB default:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
...defaultStorageConfig,
|
|
||||||
maxTotalStorage
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
@ -1,173 +0,0 @@
|
|||||||
/**
|
|
||||||
* Generic content cache with TTL and size limits
|
|
||||||
* Reusable for any cached data with configurable parameters
|
|
||||||
*/
|
|
||||||
|
|
||||||
const DEBUG = process.env.NODE_ENV === 'development';
|
|
||||||
|
|
||||||
interface CacheEntry<T> {
|
|
||||||
value: T;
|
|
||||||
timestamp: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CacheConfig {
|
|
||||||
/** Time-to-live in milliseconds */
|
|
||||||
ttl: number;
|
|
||||||
/** Maximum number of cache entries */
|
|
||||||
maxSize: number;
|
|
||||||
/** Enable cleanup warnings in development */
|
|
||||||
enableWarnings?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class ContentCache<T> {
|
|
||||||
private cache = new Map<string, CacheEntry<T>>();
|
|
||||||
private hits = 0;
|
|
||||||
private misses = 0;
|
|
||||||
|
|
||||||
constructor(private readonly config: CacheConfig) {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get cached value if valid
|
|
||||||
*/
|
|
||||||
get(key: string): T | null {
|
|
||||||
const entry = this.cache.get(key);
|
|
||||||
|
|
||||||
if (!entry) {
|
|
||||||
this.misses++;
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if expired
|
|
||||||
if (Date.now() - entry.timestamp > this.config.ttl) {
|
|
||||||
this.cache.delete(key);
|
|
||||||
this.misses++;
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.hits++;
|
|
||||||
return entry.value;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set cached value
|
|
||||||
*/
|
|
||||||
set(key: string, value: T): void {
|
|
||||||
// Clean up before adding if at capacity
|
|
||||||
if (this.cache.size >= this.config.maxSize) {
|
|
||||||
this.evictOldest();
|
|
||||||
}
|
|
||||||
|
|
||||||
this.cache.set(key, {
|
|
||||||
value,
|
|
||||||
timestamp: Date.now()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate cache key from ArrayBuffer content
|
|
||||||
*/
|
|
||||||
generateKeyFromBuffer(data: ArrayBuffer): string {
|
|
||||||
// Use file size + hash of first/last bytes as cache key
|
|
||||||
const view = new Uint8Array(data);
|
|
||||||
const size = data.byteLength;
|
|
||||||
const start = Array.from(view.slice(0, 16)).join(',');
|
|
||||||
const end = Array.from(view.slice(-16)).join(',');
|
|
||||||
return `${size}-${this.simpleHash(start + end)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Generate cache key from string content
|
|
||||||
*/
|
|
||||||
generateKeyFromString(content: string): string {
|
|
||||||
return this.simpleHash(content);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if key exists and is valid
|
|
||||||
*/
|
|
||||||
has(key: string): boolean {
|
|
||||||
return this.get(key) !== null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clear all cache entries
|
|
||||||
*/
|
|
||||||
clear(): void {
|
|
||||||
this.cache.clear();
|
|
||||||
this.hits = 0;
|
|
||||||
this.misses = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get cache statistics
|
|
||||||
*/
|
|
||||||
getStats(): {
|
|
||||||
size: number;
|
|
||||||
maxSize: number;
|
|
||||||
hitRate: number;
|
|
||||||
hits: number;
|
|
||||||
misses: number;
|
|
||||||
} {
|
|
||||||
const total = this.hits + this.misses;
|
|
||||||
const hitRate = total > 0 ? this.hits / total : 0;
|
|
||||||
|
|
||||||
return {
|
|
||||||
size: this.cache.size,
|
|
||||||
maxSize: this.config.maxSize,
|
|
||||||
hitRate,
|
|
||||||
hits: this.hits,
|
|
||||||
misses: this.misses
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Cleanup expired entries
|
|
||||||
*/
|
|
||||||
cleanup(): void {
|
|
||||||
const now = Date.now();
|
|
||||||
let cleaned = 0;
|
|
||||||
|
|
||||||
for (const [key, entry] of this.cache.entries()) {
|
|
||||||
if (now - entry.timestamp > this.config.ttl) {
|
|
||||||
this.cache.delete(key);
|
|
||||||
cleaned++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (DEBUG && this.config.enableWarnings && this.cache.size > this.config.maxSize * 0.8) {
|
|
||||||
console.warn(`📦 ContentCache: High cache usage (${this.cache.size}/${this.config.maxSize}), cleaned ${cleaned} expired entries`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Evict oldest entry when at capacity
|
|
||||||
*/
|
|
||||||
private evictOldest(): void {
|
|
||||||
let oldestKey: string | null = null;
|
|
||||||
let oldestTime = Date.now();
|
|
||||||
|
|
||||||
for (const [key, entry] of this.cache.entries()) {
|
|
||||||
if (entry.timestamp < oldestTime) {
|
|
||||||
oldestTime = entry.timestamp;
|
|
||||||
oldestKey = key;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (oldestKey) {
|
|
||||||
this.cache.delete(oldestKey);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Simple hash function for cache keys
|
|
||||||
*/
|
|
||||||
private simpleHash(str: string): string {
|
|
||||||
let hash = 0;
|
|
||||||
for (let i = 0; i < str.length; i++) {
|
|
||||||
const char = str.charCodeAt(i);
|
|
||||||
hash = ((hash << 5) - hash) + char;
|
|
||||||
hash = hash & hash; // Convert to 32-bit integer
|
|
||||||
}
|
|
||||||
return Math.abs(hash).toString(36);
|
|
||||||
}
|
|
||||||
}
|
|
Loading…
x
Reference in New Issue
Block a user