Mirror of https://github.com/Stirling-Tools/Stirling-PDF.git, synced 2025-06-06 18:30:57 +00:00

Commit 072de14f38 (parent ae8221d134)
Commit message: test

.github/workflows/file_hash_generation.yml (vendored): 18 lines changed
@@ -36,8 +36,11 @@ jobs:
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.zip.CRC32;
 
 public class HashGenerator {
@@ -46,9 +49,9 @@ jobs:
     private static final String OUTPUT_FILE = "src/main/resources/reference-hash.json";
 
     // Text file extensions that need normalization
-    private static final List<String> TEXT_EXTENSIONS = List.of(
+    private static final Set<String> TEXT_EXTENSIONS = new HashSet<>(Arrays.asList(
         "html", "htm", "css", "js", "txt", "md", "xml", "json", "csv", "properties"
-    );
+    ));
 
     public static void main(String[] args) throws IOException {
         List<FileEntry> entries = new ArrayList<>();
@@ -92,21 +95,22 @@ jobs:
                 String fullPath = basePath + "/" + relativePath;
 
                 // Calculate hash
-                String hash = calculateFileHash(file);
+                String hash = calculateFileHash(file.toPath());
 
                 entries.add(new FileEntry(fullPath, hash));
+                System.out.println("Processed: " + fullPath + " => " + hash);
             }
         }
     }
 
-    private static String calculateFileHash(File file) throws IOException {
-        String extension = getFileExtension(file.getName()).toLowerCase();
+    private static String calculateFileHash(Path filePath) throws IOException {
+        String extension = getFileExtension(filePath.toString()).toLowerCase();
         boolean isTextFile = TEXT_EXTENSIONS.contains(extension);
 
         if (isTextFile) {
-            return calculateNormalizedTextFileHash(file.toPath());
+            return calculateNormalizedTextFileHash(filePath);
         } else {
-            return calculateBinaryFileHash(file.toPath());
+            return calculateBinaryFileHash(filePath);
         }
     }
 
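Note: calculateNormalizedTextFileHash and calculateBinaryFileHash are called above but defined outside this hunk. A minimal sketch of the text-file path, assuming line endings are normalized to LF before hashing and the CRC32 value is emitted as a decimal string so the workflow output stays comparable with the runtime check in TemplateIntegrityConfig below; the class name is illustrative:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.CRC32;

// Hypothetical helper; the real method is not part of this diff.
final class NormalizedHashSketch {
    static String calculateNormalizedTextFileHash(Path filePath) throws IOException {
        String text = new String(Files.readAllBytes(filePath), StandardCharsets.UTF_8);
        // Assumed normalization: CRLF and lone CR both collapse to LF.
        String normalized = text.replace("\r\n", "\n").replace("\r", "\n");
        byte[] bytes = normalized.getBytes(StandardCharsets.UTF_8);
        CRC32 checksum = new CRC32();
        checksum.update(bytes, 0, bytes.length);
        // Decimal string, matching String.valueOf(checksum.getValue()) used elsewhere.
        return String.valueOf(checksum.getValue());
    }
}

CRC32 is a checksum rather than a cryptographic hash, so this catches accidental template edits, not deliberate tampering.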
TemplateIntegrityConfig.java

@@ -19,7 +19,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.zip.CRC32;
-import java.util.zip.Checksum;
 
 @Configuration
 public class TemplateIntegrityConfig {
@@ -40,12 +39,20 @@ public class TemplateIntegrityConfig {
     @Value("${template.normalize.line.endings:true}")
     private boolean normalizeLineEndings;
 
+    @Value("${template.integrity.enabled:true}")
+    private boolean integrityCheckEnabled;
+
     public TemplateIntegrityConfig(ResourceLoader resourceLoader) {
         this.resourceLoader = resourceLoader;
     }
 
     @Bean
     public boolean templatesModified() {
+        if (!integrityCheckEnabled) {
+            logger.info("Template integrity checking is disabled");
+            return false;
+        }
+
         try {
             // Log configuration information
             logConfiguration();
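The new template.integrity.enabled property defaults to true; when set to false, templatesModified() returns false before any hashing work happens. A hypothetical consumer of the resulting boolean bean (class and method names are illustrative, not part of this commit):

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

// Hypothetical consumer: with template.integrity.enabled=false the injected
// bean is always false, so callers simply see "templates not modified".
@Component
public class TemplateStatusReporter {

    private final boolean templatesModified;

    public TemplateStatusReporter(@Qualifier("templatesModified") boolean templatesModified) {
        this.templatesModified = templatesModified;
    }

    public boolean isRunningModifiedTemplates() {
        return templatesModified;
    }
}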
@@ -84,51 +91,20 @@ public class TemplateIntegrityConfig {
         }
     }
 
-    private void logHashComparison(String relativePath, String referenceHash, String currentHash) {
-        logger.info("Hash comparison for {}: reference={}, current={}",
-                relativePath, referenceHash, currentHash);
-
-        // Try to convert between formats to see if that's the issue
-        try {
-            // If currentHash is hex, convert to decimal
-            if (currentHash.matches("[0-9a-fA-F]+") && !currentHash.matches("[0-9]+")) {
-                long decimalValue = Long.parseLong(currentHash, 16);
-                logger.info("Converting current hex hash to decimal: {} -> {}",
-                        currentHash, decimalValue);
-
-                // Check if the decimal conversion matches the reference
-                if (String.valueOf(decimalValue).equals(referenceHash)) {
-                    logger.warn("Hash format mismatch detected! Current hash is in hex format but reference is decimal.");
-                }
-            }
-
-            // If referenceHash could be decimal but currentHash is decimal too
-            // Check if reference could be hex
-            if (referenceHash.matches("[0-9]+") && currentHash.matches("[0-9]+")) {
-                // Try treating reference as hex and converting to decimal
-                try {
-                    long hexAsDecimal = Long.parseLong(referenceHash, 16);
-                    if (String.valueOf(hexAsDecimal).equals(currentHash)) {
-                        logger.warn("Reference hash might be incorrectly interpreted as decimal when it's hex!");
-                    }
-                } catch (NumberFormatException e) {
-                    // Not a valid hex number, which is fine
-                }
-            }
-        } catch (Exception e) {
-            logger.info("Error during hash format conversion check", e);
-        }
-    }
-
     private void logReferenceHashes(Map<String, String> hashes) {
         logger.info("Loaded {} reference hashes", hashes.size());
 
-        // Log a few example HTML files for infoging
+        // Log a few example files for debugging
         logger.info("Sample of reference hashes:");
         hashes.entrySet().stream()
-                .filter(e -> e.getKey().endsWith(".html"))
                 .limit(5)
                 .forEach(e -> logger.info(" - {}: {}", e.getKey(), e.getValue()));
 
+        // Also log sample PDF files if any
+        hashes.entrySet().stream()
+                .filter(e -> e.getKey().endsWith(".pdf"))
+                .limit(3)
+                .forEach(e -> logger.info(" - PDF file: {}: {}", e.getKey(), e.getValue()));
     }
 
     private Map<String, String> parseHashJson(String json) {
@@ -144,7 +120,7 @@ public class TemplateIntegrityConfig {
                     result.put(parts[0], parts[1]);
                 }
             }
-        logger.info("Loaded {} reference hashes", result.size());
+        logger.debug("Loaded {} reference hashes", result.size());
         return result;
     }
 
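parseHashJson is only partially visible in this diff; the parts[0]/parts[1] split suggests reference-hash.json is a flat object of "path": "hash" entries, as written by the HashGenerator above. A rough sketch under that assumption (not the project's actual parser):

import java.util.HashMap;
import java.util.Map;

// Rough sketch only: assumes a flat {"path": "hash", ...} object with no
// nested values, splitting each entry into a key (parts[0]) and value (parts[1]).
final class HashJsonSketch {
    static Map<String, String> parseHashJson(String json) {
        Map<String, String> result = new HashMap<>();
        String body = json.trim();
        if (body.startsWith("{")) body = body.substring(1);
        if (body.endsWith("}")) body = body.substring(0, body.length() - 1);
        for (String entry : body.split(",")) {
            String[] parts = entry.split(":", 2);
            if (parts.length == 2) {
                result.put(parts[0].trim().replace("\"", ""),
                        parts[1].trim().replace("\"", ""));
            }
        }
        return result;
    }
}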
@@ -172,7 +148,7 @@ public class TemplateIntegrityConfig {
             Path directory = Paths.get(resource.getURI());
 
             if (Files.exists(directory) && Files.isDirectory(directory)) {
-                // Walk the directory tree
+                // Walk the directory tree - process ALL files, not just text files
                 Files.walk(directory)
                         .filter(Files::isRegularFile)
                         .forEach(path -> {
@@ -182,8 +158,8 @@
                             String basePath = dirPath.replace("/", "");
                             String relativePath = basePath + "/" + directory.relativize(path).toString().replace("\\", "/");
 
-                            // info log the path normalization
-                            logger.info("Processing file: {} -> {}", path, relativePath);
+                            // Debug log the path normalization
+                            logger.debug("Processing file: {} -> {}", path, relativePath);
 
                             // Check if this file is in our reference
                             String referenceHash = referenceHashes.get(relativePath);
@@ -208,9 +184,10 @@
                             // Check if the hash matches
                             String currentHash = computeFileHash(path);
 
+                            // Log hash comparison for debugging
                             logger.info("Hash comparison for {}: reference={}, current={}",
                                     relativePath, referenceHash, currentHash);
-                            logHashComparison(relativePath, referenceHash, currentHash);
+
                             if (!currentHash.equals(referenceHash)) {
                                 logger.warn("Modified file detected: {} (expected hash: {}, actual hash: {})",
                                         relativePath, referenceHash, currentHash);
@@ -232,7 +209,7 @@
         if (!modified.get()) {
             for (Map.Entry<String, Boolean> entry : foundFiles.entrySet()) {
                 if (!entry.getValue()) {
-                    // File was in reference but not found
+                    // File was in reference but not found - report ALL missing files
                     logger.warn("Missing file detected: {} (expected hash: {})",
                             entry.getKey(), referenceHashes.get(entry.getKey()));
                     return true;
@@ -247,15 +224,14 @@
         // For text files like HTML, normalize content before hashing
         String extension = getFileExtension(filePath.toString()).toLowerCase();
         if (normalizeLineEndings && isTextFile(extension)) {
-            return computeNormalizedTextFileHash(filePath, extension);
+            return computeNormalizedTextFileHash(filePath);
         } else {
             // Binary files use direct CRC32
             return computeBinaryFileHash(filePath);
         }
     }
 
-    private String computeNormalizedTextFileHash(Path filePath, String extension) throws IOException {
+    private String computeNormalizedTextFileHash(Path filePath) throws IOException {
        byte[] content = Files.readAllBytes(filePath);
        String text = new String(content, StandardCharsets.UTF_8);
 
@@ -264,25 +240,19 @@
 
        byte[] normalizedBytes = text.getBytes(StandardCharsets.UTF_8);
 
-        Checksum checksum = new CRC32();
+        CRC32 checksum = new CRC32();
         checksum.update(normalizedBytes, 0, normalizedBytes.length);
         return String.valueOf(checksum.getValue());
     }
 
     private String computeBinaryFileHash(Path filePath) throws IOException {
-        Checksum checksum = new CRC32();
-
-        try (InputStream is = Files.newInputStream(filePath)) {
-            byte[] buffer = new byte[BUFFER_SIZE];
-            int bytesRead;
-            while ((bytesRead = is.read(buffer)) != -1) {
-                checksum.update(buffer, 0, bytesRead);
-            }
-        }
-
-        // Return decimal representation to match GitHub workflow
+        byte[] content = Files.readAllBytes(filePath);
+
+        CRC32 checksum = new CRC32();
+        checksum.update(content, 0, content.length);
+
         return String.valueOf(checksum.getValue());
     }
 
     private String getFileExtension(String filename) {
         int lastDot = filename.lastIndexOf('.');
         if (lastDot == -1 || lastDot == filename.length() - 1) {
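The rewritten computeBinaryFileHash reads the whole file instead of streaming it through a buffer; both approaches checksum the same bytes and therefore produce the same CRC32 value, so existing reference hashes stay comparable. A small sketch contrasting the two (buffer size and class name are illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.CRC32;

final class Crc32EquivalenceSketch {

    // Old approach: stream the file through a fixed-size buffer.
    static long streamedCrc32(Path file) throws IOException {
        CRC32 checksum = new CRC32();
        try (InputStream is = Files.newInputStream(file)) {
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                checksum.update(buffer, 0, bytesRead);
            }
        }
        return checksum.getValue();
    }

    // New approach: read everything, then checksum in one call; simpler, at the
    // cost of holding the whole file in memory.
    static long wholeFileCrc32(Path file) throws IOException {
        byte[] content = Files.readAllBytes(file);
        CRC32 checksum = new CRC32();
        checksum.update(content, 0, content.length);
        return checksum.getValue();
    }
}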
@@ -299,5 +269,4 @@ private String computeBinaryFileHash(Path filePath) throws IOException {
                 extension.equals("xml") || extension.equals("json") ||
                 extension.equals("csv") || extension.equals("properties");
     }
-
 }
Thymeleaf template (path not shown in this view)

@@ -9,7 +9,6 @@
     <div id="page-container">
         <div id="content-wrap">
             <th:block th:insert="~{fragments/navbar.html :: navbar}"></th:block>
-
             <br class="d-md-none">
             <!-- Features -->
             <script th:src="@{'/js/homecard.js'}"></script>