Compare commits

main..v0.46.1

No commits in common. "main" and "v0.46.1" have entirely different histories.

1253 changed files with 8919 additions and 61918 deletions


@ -1,12 +0,0 @@
{
"permissions": {
"allow": [
"Bash(chmod:*)",
"Bash(mkdir:*)",
"Bash(./gradlew:*)",
"Bash(grep:*)",
"Bash(cat:*)"
],
"deny": []
}
}


@ -119,9 +119,7 @@
"EditorConfig.EditorConfig", // EditorConfig support for maintaining consistent coding styles
"ms-azuretools.vscode-docker", // Docker extension for Visual Studio Code
"charliermarsh.ruff", // Ruff extension for Ruff language support
"github.vscode-github-actions", // GitHub Actions extension for Visual Studio Code
"stylelint.vscode-stylelint", // Stylelint extension for CSS and SCSS linting
"redhat.vscode-yaml" // YAML extension for Visual Studio Code
"github.vscode-github-actions" // GitHub Actions extension for Visual Studio Code
]
}
},


@ -1,7 +1,6 @@
root = true
[*]
charset = utf-8
indent_style = space
indent_size = 4
end_of_line = lf
@ -27,26 +26,6 @@ trim_trailing_whitespace = false
[*.js]
indent_size = 2
[*.css]
# CSS files typically use an indent size of 2 spaces for better readability and alignment with community standards.
indent_size = 2
[*.yaml]
# YAML files use an indent size of 2 spaces to maintain consistency with common YAML formatting practices.
indent_size = 2
insert_final_newline = false
trim_trailing_whitespace = false
[*.yml]
# YML files follow the same conventions as YAML files, using an indent size of 2 spaces.
indent_size = 2
insert_final_newline = false
trim_trailing_whitespace = false
[*.json]
# JSON files use an indent size of 2 spaces, which is the standard for JSON formatting.
indent_size = 2
[*.jsonc]
# JSONC (JSON with comments) files also follow the standard JSON formatting with an indent size of 2 spaces.
indent_size = 2


@ -1,9 +1,5 @@
# Formatting
5f771b785130154ed47952635b7acef371ffe0ec
7fa5e130d99227c2202ebddfdd91348176ec0c7b
14d4fbb2a36195eedb034785e5a5ff6a47f268c6
ee8030c1c4148062cde15c49c67d04ef03930c55
fcd41924f5f261febfa9d9a92994671f3ebc97d6
# Normalize files
55d4fda01b2f39f5b7d7b4fda5214bd7ff0fd5dd

.gitattributes (vendored, 14 changed lines)

@ -1,10 +1,10 @@
* text=auto eol=lf
# Ignore all JavaScript files in a directory
app/core/src/main/resources/static/pdfjs/* linguist-vendored
app/core/src/main/resources/static/pdfjs/** linguist-vendored
app/core/src/main/resources/static/pdfjs-legacy/* linguist-vendored
app/core/src/main/resources/static/pdfjs-legacy/** linguist-vendored
app/core/src/main/resources/static/css/bootstrap-icons.css linguist-vendored
app/core/src/main/resources/static/css/bootstrap.min.css linguist-vendored
app/core/src/main/resources/static/css/fonts/* linguist-vendored
src/main/resources/static/pdfjs/* linguist-vendored
src/main/resources/static/pdfjs/** linguist-vendored
src/main/resources/static/pdfjs-legacy/* linguist-vendored
src/main/resources/static/pdfjs-legacy/** linguist-vendored
src/main/resources/static/css/bootstrap-icons.css linguist-vendored
src/main/resources/static/css/bootstrap.min.css linguist-vendored
src/main/resources/static/css/fonts/* linguist-vendored

.github/CODEOWNERS (vendored, 2 changed lines)

@ -1,2 +1,2 @@
# All PRs to V1 must be approved by Frooodle
* @Frooodle @reecebrowne @Ludy87 @DarioGii @ConnorYoh @EthanHealy01
* @Frooodle @reecebrowne @Ludy87 @DarioGii @ConnorYoh


@ -1,33 +0,0 @@
name: 'Setup GitHub App Bot'
description: 'Generates a GitHub App Token and configures Git for a bot'
inputs:
app-id:
description: 'GitHub App ID'
required: True
private-key:
description: 'GitHub App Private Key'
required: True
outputs:
token:
description: 'Generated GitHub App Token'
value: ${{ steps.generate-token.outputs.token }}
committer:
description: 'Committer string for Git'
value: "${{ steps.generate-token.outputs.app-slug }}[bot] <${{ steps.generate-token.outputs.app-slug }}[bot]@users.noreply.github.com>"
app-slug:
description: 'GitHub App slug'
value: ${{ steps.generate-token.outputs.app-slug }}
runs:
using: 'composite'
steps:
- name: Generate a GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ inputs.app-id }}
private-key: ${{ inputs.private-key }}
- name: Configure Git
run: |
git config --global user.name "${{ steps.generate-token.outputs.app-slug }}[bot]"
git config --global user.email "${{ steps.generate-token.outputs.app-slug }}[bot]@users.noreply.github.com"
shell: bash
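
For orientation, the workflows later in this compare invoke this composite action along the following lines; this is a condensed sketch assembled from the auto-labeler and PR-title-review workflows shown below, not a file that appears in the diff itself:

steps:
  - name: Setup GitHub App Bot
    id: setup-bot
    uses: ./.github/actions/setup-bot
    with:
      app-id: ${{ secrets.GH_APP_ID }}
      private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
  # Subsequent steps authenticate with ${{ steps.setup-bot.outputs.token }}
  # and identify the bot account via ${{ steps.setup-bot.outputs.app-slug }}[bot].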


@ -1,29 +0,0 @@
build: &build
- build.gradle
- app/(common|core|proprietary)/build.gradle
app: &app
- app/(common|core|proprietary)/src/main/java/**
openapi: &openapi
- build.gradle
- app/(common|core|proprietary)/build.gradle
- app/(common|core|proprietary)/src/main/java/**
project: &project
- app/(common|core|proprietary)/src/(main|test)/java/**
- app/(common|core|proprietary)/build.gradle
- 'app/(common|core|proprietary)/src/(main|test)/resources/**/!(messages_*.properties|*.md)*'
- exampleYmlFiles/**
- gradle/**
- libs/**
- testing/**
- build.gradle
- Dockerfile
- Dockerfile.fat
- Dockerfile.ultra-lite
- gradle.properties
- gradlew
- gradlew.bat
- launch4jConfig.xml
- settings.gradle


@ -1,13 +0,0 @@
{
"repo_devs": [
"Frooodle",
"sf298",
"Ludy87",
"LaserKaspar",
"sbplat",
"reecebrowne",
"DarioGii",
"ConnorYoh",
"EthanHealy01"
]
}


@ -1,13 +0,0 @@
You are a professional software engineer specializing in reviewing pull request titles.
Your job is to analyze a git diff and an existing PR title, then evaluate and improve the PR title.
You must:
- Always return valid JSON
- Only return the JSON response (no Markdown, no formatting)
- Use one of these conventional commit types at the beginning of the title: build, chore, ci, docs, feat, fix, perf, refactor, revert, style, test
- Use lowercase only, no emojis, no trailing period
- Ensure the title is between 5 and 72 printable ASCII characters
- Never let spelling or grammar errors affect the rating
- If the PR title is rated 6 or higher and only contains spelling or grammar mistakes, correct it - do not rephrase it
- If the PR title is rated below 6, generate a new, better title based on the diff
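
For illustration only, a response that satisfies these rules could look like the example below; the ratings and the suggested title are invented placeholders, and the key names follow the schema that the PR-title-review workflow later in this compare validates with jq:

{
  "improved_rating": 4,
  "improved_ai_title_rating": 9,
  "improved_title": "fix: prevent crash when merging empty pdf files"
}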


@ -1,157 +0,0 @@
version: 1
labels:
- label: "Bugfix"
title: '^fix(\([^)]*\))?:|^fix:.*'
- label: "enhancement"
title: '^feat(\([^)]*\))?:|^feat:.*'
- label: "build"
title: '^build(\([^)]*\))?:|^build:.*'
- label: "chore"
title: '^chore(\([^)]*\))?:|^chore:.*'
- label: "ci"
title: '^ci(\([^)]*\))?:|^ci:.*'
- label: "ci"
title: '^.*\(ci\):.*'
- label: "perf"
title: '^perf(\([^)]*\))?:|^perf:.*'
- label: "refactor"
title: '^refactor(\([^)]*\))?:|^refactor:.*'
- label: "revert"
title: '^revert(\([^)]*\))?:|^revert:.*'
- label: "style"
title: '^style(\([^)]*\))?:|^style:.*'
- label: "Documentation"
title: '^docs(\([^)]*\))?:|^docs:.*'
- label: "Documentation"
title: '^.*\(docs\):.*'
- label: "dependencies"
title: '^deps(\([^)]*\))?:|^deps:.*'
- label: "dependencies"
title: '^.*\(deps\):.*'
- label: 'API'
title: '.*openapi.*|.*swagger.*|.*api.*'
- label: 'Translation'
files:
- 'app/core/src/main/resources/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}.properties'
- 'scripts/ignore_translation.toml'
- 'app/core/src/main/resources/templates/fragments/languages.html'
- '.github/scripts/check_language_properties.py'
- label: 'Front End'
files:
- 'app/core/src/main/resources/templates/.*'
- 'app/proprietary/src/main/resources/templates/.*'
- 'app/core/src/main/resources/static/.*'
- 'app/proprietary/src/main/resources/static/.*'
- 'app/core/src/main/java/stirling/software/SPDF/controller/web/.*'
- 'app/core/src/main/java/stirling/software/SPDF/UI/.*'
- 'app/proprietary/src/main/java/stirling/software/proprietary/security/controller/web/.*'
- label: 'Java'
files:
- 'app/common/src/main/java/.*.java'
- 'app/proprietary/src/main/java/.*.java'
- 'app/core/src/main/java/.*.java'
- label: 'Back End'
files:
- 'app/core/src/main/java/stirling/software/SPDF/config/.*'
- 'app/core/src/main/java/stirling/software/SPDF/controller/.*'
- 'app/core/src/main/resources/settings.yml.template'
- 'app/core/src/main/resources/application.properties'
- 'app/core/src/main/resources/banner.txt'
- 'app/core/src/main/resources/static/python/png_to_webp.py'
- 'app/core/src/main/resources/static/python/split_photos.py'
- 'application.properties'
- label: 'Security'
files:
- 'app/proprietary/src/main/java/stirling/software/proprietary/security/.*'
- 'scripts/download-security-jar.sh'
- '.github/workflows/dependency-review.yml'
- '.github/workflows/scorecards.yml'
- label: 'API'
files:
- 'app/core/src/main/java/stirling/software/SPDF/config/OpenApiConfig.java'
- 'app/core/src/main/java/stirling/software/SPDF/controller/web/MetricsController.java'
- 'app/core/src/main/java/stirling/software/SPDF/controller/api/.*'
- 'app/core/src/main/java/stirling/software/SPDF/model/api/.*'
- 'app/core/src/main/java/stirling/software/SPDF/service/ApiDocService.java'
- 'app/proprietary/src/main/java/stirling/software/proprietary/security/controller/api/.*'
- 'app/core/src/main/resources/static/python/png_to_webp.py'
- 'app/core/src/main/resources/static/python/split_photos.py'
- '.github/workflows/swagger.yml'
- label: 'Documentation'
files:
- '.*.md'
- 'scripts/counter_translation.py'
- 'scripts/ignore_translation.toml'
- label: 'Docker'
files:
- '.github/workflows/build.yml'
- '.github/workflows/push-docker.yml'
- 'Dockerfile'
- 'Dockerfile.fat'
- 'Dockerfile.ultra-lite'
- 'exampleYmlFiles/.*.yml'
- 'scripts/download-security-jar.sh'
- 'scripts/init.sh'
- 'scripts/init-without-ocr.sh'
- 'scripts/installFonts.sh'
- 'test.sh'
- 'test2.sh'
- label: 'Devtools'
files:
- '.devcontainer/.*'
- 'Dockerfile.dev'
- '.vscode/.*'
- '.editorconfig'
- '.pre-commit-config'
- '.github/workflows/pre_commit.yml'
- 'devGuide/.*'
- 'devTools/.*'
- 'devTools/.*'
- label: 'Test'
files:
- 'app/common/src/test/.*'
- 'app/proprietary/src/test/.*'
- 'app/core/src/test/.*'
- 'testing/.*'
- '.github/workflows/scorecards.yml'
- 'exampleYmlFiles/test_cicd.yml'
- label: 'Github'
files:
- '.github/.*'
- label: 'Gradle'
files:
- 'gradle/.*'
- 'gradlew'
- 'gradlew.bat'
- 'settings.gradle'
- 'build.gradle'
- 'app/common/build.gradle'
- 'app/proprietary/build.gradle'
- 'app/core/build.gradle'

.github/labeler-config.yml (vendored, new file, 99 added lines)

@ -0,0 +1,99 @@
Translation:
- changed-files:
- any-glob-to-any-file: 'src/main/resources/messages_*_*.properties'
- any-glob-to-any-file: 'scripts/ignore_translation.toml'
- any-glob-to-any-file: 'src/main/resources/templates/fragments/languages.html'
Front End:
- changed-files:
- any-glob-to-any-file: 'src/main/resources/templates/**/*'
- any-glob-to-any-file: 'src/main/resources/static/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/**'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/UI/**/*'
Java:
- changed-files:
- any-glob-to-any-file: 'src/main/java/**/*.java'
Back End:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/**/*'
- any-glob-to-any-file: 'src/main/resources/settings.yml.template'
- any-glob-to-any-file: 'src/main/resources/application.properties'
- any-glob-to-any-file: 'src/main/resources/banner.txt'
- any-glob-to-any-file: 'scripts/png_to_webp.py'
- any-glob-to-any-file: 'split_photos.py'
Security:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/interfaces/DatabaseInterface.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/security/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/DatabaseController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/EmailController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/H2SQLController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/AccountWebController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/DatabaseWebController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/UserController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/api/Email.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/exception/BackupNotFoundException.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/exception/NoProviderFoundExceptionjava'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/provider/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/AuthenticationType.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/ApiKeyAuthenticationToken.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/AttemptCounter.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/Authority.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/PersistentLogin.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/SessionEntity.java'
- any-glob-to-any-file: 'scripts/download-security-jar.sh'
- any-glob-to-any-file: '.github/workflows/dependency-review.yml'
- any-glob-to-any-file: '.github/workflows/scorecards.yml'
API:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/OpenApiConfig.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/MetricsController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/model/api/**/*'
- any-glob-to-any-file: 'scripts/png_to_webp.py'
- any-glob-to-any-file: 'split_photos.py'
- any-glob-to-any-file: '.github/workflows/swagger.yml'
Documentation:
- changed-files:
- any-glob-to-any-file: '**/*.md'
- any-glob-to-any-file: 'scripts/counter_translation.py'
- any-glob-to-any-file: 'scripts/ignore_translation.toml'
Docker:
- changed-files:
- any-glob-to-any-file: '.github/workflows/build.yml'
- any-glob-to-any-file: '.github/workflows/push-docker.yml'
- any-glob-to-any-file: 'Dockerfile'
- any-glob-to-any-file: 'Dockerfile.fat'
- any-glob-to-any-file: 'Dockerfile.ultra-lite'
- any-glob-to-any-file: 'exampleYmlFiles/*.yml'
- any-glob-to-any-file: 'scripts/download-security-jar.sh'
- any-glob-to-any-file: 'scripts/init.sh'
- any-glob-to-any-file: 'scripts/init-without-ocr.sh'
- any-glob-to-any-file: 'scripts/installFonts.sh'
- any-glob-to-any-file: 'test.sh'
- any-glob-to-any-file: 'test2.sh'
Devtools:
- changed-files:
- any-glob-to-any-file: '.devcontainer/**/*'
- any-glob-to-any-file: 'Dockerfile.dev'
Test:
- changed-files:
- any-glob-to-any-file: 'cucumber/**/*'
- any-glob-to-any-file: 'src/test/**/*'
- any-glob-to-any-file: 'src/testing/**/*'
- any-glob-to-any-file: '.pre-commit-config'
- any-glob-to-any-file: '.github/workflows/pre_commit.yml'
- any-glob-to-any-file: '.github/workflows/scorecards.yml'
Github:
- changed-files:
- any-glob-to-any-file: '.github/**/*'

.github/labels.yml (vendored, 67 changed lines)

@ -111,70 +111,3 @@
- name: "Devtools"
color: "FF9E1F"
description: "Development tools"
- name: "Bugfix"
color: "FF9E1F"
description: "Pull requests that fix bugs"
- name: "Gradle"
color: "FF9E1F"
description: "Pull requests that update Gradle code"
- name: "build"
color: "1E90FF"
description: "Changes that affect the build system or external dependencies"
- name: "chore"
color: "FFD700"
description: "Routine tasks or maintenance that don't modify src or test files"
- name: "ci"
color: "4682B4"
description: "Changes to CI configuration files and scripts"
- name: "perf"
color: "FF69B4"
description: "Changes that improve performance"
- name: "refactor"
color: "9932CC"
description: "Code changes that neither fix a bug nor add a feature"
- name: "revert"
color: "DC143C"
description: "Reverts a previous commit"
- name: "style"
color: "FFA500"
description: "Changes that do not affect the meaning of the code (formatting, etc.)"
- name: "admin"
color: "195055"
- name: "codex"
color: "ededed"
description: null
- name: "Github"
color: "0052CC"
- name: "github_actions"
color: "000000"
description: "Pull requests that update GitHub Actions code"
- name: "needs-changes"
color: "A65A86"
- name: "on-hold"
color: "2526F9"
- name: "python"
color: "2b67c6"
description: "Pull requests that update Python code"
- name: "size:L"
color: "eb9500"
description: "This PR changes 100-499 lines ignoring generated files."
- name: "size:M"
color: "ebb800"
description: "This PR changes 30-99 lines ignoring generated files."
- name: "size:S"
color: "77b800"
description: "This PR changes 10-29 lines ignoring generated files."
- name: "size:XL"
color: "ff823f"
description: "This PR changes 500-999 lines ignoring generated files."
- name: "size:XS"
color: "00ff00"
description: "This PR changes 0-9 lines ignoring generated files."
- name: "size:XXL"
color: "ffb8b8"
description: "This PR changes 1000+ lines ignoring generated files."
- name: "to research"
color: "FBCA04"
- name: "pr-deployed"
color: "00FF00"
description: "Pull request has been deployed to a test environment"


@ -1,6 +1,5 @@
# Description of Changes
<!--
Please provide a summary of the changes, including:
- What was changed
@ -8,7 +7,6 @@ Please provide a summary of the changes, including:
- Any challenges encountered
Closes #(issue_number)
-->
---
@ -17,15 +15,15 @@ Closes #(issue_number)
### General
- [ ] I have read the [Contribution Guidelines](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/CONTRIBUTING.md)
- [ ] I have read the [Stirling-PDF Developer Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/DeveloperGuide.md) (if applicable)
- [ ] I have read the [How to add new languages to Stirling-PDF](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/HowToAddNewLanguage.md) (if applicable)
- [ ] I have read the [Stirling-PDF Developer Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/DeveloperGuide.md) (if applicable)
- [ ] I have read the [How to add new languages to Stirling-PDF](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/HowToAddNewLanguage.md) (if applicable)
- [ ] I have performed a self-review of my own code
- [ ] My changes generate no new warnings
### Documentation
- [ ] I have updated relevant docs on [Stirling-PDF's doc repo](https://github.com/Stirling-Tools/Stirling-Tools.github.io/blob/main/docs/) (if functionality has heavily changed)
- [ ] I have read the section [Add New Translation Tags](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/HowToAddNewLanguage.md#add-new-translation-tags) (for new translation tags only)
- [ ] I have read the section [Add New Translation Tags](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/HowToAddNewLanguage.md#add-new-translation-tags) (for new translation tags only)
### UI Changes (if applicable)
@ -33,4 +31,4 @@ Closes #(issue_number)
### Testing (if applicable)
- [ ] I have tested my changes locally. Refer to the [Testing Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/devGuide/DeveloperGuide.md#6-testing) for more details.
- [ ] I have tested my changes locally. Refer to the [Testing Guide](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/DeveloperGuide.md#6-testing) for more details.


@ -196,9 +196,7 @@ def check_for_differences(reference_file, file_list, branch, actor):
if len(file_list) == 1:
file_arr = file_list[0].split()
base_dir = os.path.abspath(
os.path.join(os.getcwd(), "app", "core", "src", "main", "resources")
)
base_dir = os.path.abspath(os.path.join(os.getcwd(), "src", "main", "resources"))
for file_path in file_arr:
file_normpath = os.path.normpath(file_path)
@ -218,20 +216,10 @@ def check_for_differences(reference_file, file_list, branch, actor):
or (
# only local windows command
not file_normpath.startswith(
os.path.join(
"", "app", "core", "src", "main", "resources", "messages_"
)
os.path.join("", "src", "main", "resources", "messages_")
)
and not file_normpath.startswith(
os.path.join(
os.getcwd(),
"app",
"core",
"src",
"main",
"resources",
"messages_",
)
os.path.join(os.getcwd(), "src", "main", "resources", "messages_")
)
)
or not file_normpath.endswith(".properties")
@ -329,7 +317,7 @@ def check_for_differences(reference_file, file_list, branch, actor):
report.append("## ❌ Overall Check Status: **_Failed_**")
report.append("")
report.append(
f"@{actor} please check your translation if it conforms to the standard. Follow the format of [messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/app/core/src/main/resources/messages_en_GB.properties)"
f"@{actor} please check your translation if it conforms to the standard. Follow the format of [messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/messages_en_GB.properties)"
)
else:
report.append("## ✅ Overall Check Status: **_Success_**")
@ -389,13 +377,7 @@ if __name__ == "__main__":
else:
file_list = glob.glob(
os.path.join(
os.getcwd(),
"app",
"core",
"src",
"main",
"resources",
"messages_*.properties",
os.getcwd(), "src", "main", "resources", "messages_*.properties"
)
)
update_missing_keys(args.reference_file, file_list)


@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --generate-hashes --output-file='.github\scripts\requirements_pre_commit.txt' --strip-extras '.github\scripts\requirements_pre_commit.in'
# pip-compile --generate-hashes --output-file='.github\scripts\requirements_pre_commit.txt' '.github\scripts\requirements_pre_commit.in'
#
cfgv==3.4.0 \
--hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \
@ -12,25 +12,25 @@ distlib==0.3.9 \
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
--hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
# via virtualenv
filelock==3.18.0 \
--hash=sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2 \
--hash=sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de
filelock==3.16.1 \
--hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
--hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
# via virtualenv
identify==2.6.12 \
--hash=sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2 \
--hash=sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6
identify==2.6.5 \
--hash=sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566 \
--hash=sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc
# via pre-commit
nodeenv==1.9.1 \
--hash=sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f \
--hash=sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9
# via pre-commit
platformdirs==4.3.8 \
--hash=sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc \
--hash=sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4
platformdirs==4.3.6 \
--hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
--hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
# via virtualenv
pre-commit==4.2.0 \
--hash=sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146 \
--hash=sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd
pre-commit==4.0.1 \
--hash=sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2 \
--hash=sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878
# via -r .github\scripts\requirements_pre_commit.in
pyyaml==6.0.2 \
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
@ -87,7 +87,7 @@ pyyaml==6.0.2 \
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
# via pre-commit
virtualenv==20.31.2 \
--hash=sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11 \
--hash=sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af
virtualenv==20.28.1 \
--hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \
--hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329
# via pre-commit


@ -2,9 +2,9 @@
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile --generate-hashes --output-file='.github\scripts\requirements_sync_readme.txt' --strip-extras '.github\scripts\requirements_sync_readme.in'
# pip-compile --generate-hashes --output-file='.github\scripts\requirements_sync_readme.txt' '.github\scripts\requirements_sync_readme.in'
#
tomlkit==0.13.3 \
--hash=sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1 \
--hash=sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0
tomlkit==0.13.2 \
--hash=sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde \
--hash=sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79
# via -r .github\scripts\requirements_sync_readme.in


@ -6,18 +6,20 @@ on:
permissions:
contents: read
pull-requests: read
issues: write # Required for adding reactions to comments
pull-requests: read # Required for reading PR information
jobs:
check-comment:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: read
if: |
github.event.issue.pull_request &&
(
contains(github.event.comment.body, 'prdeploy') ||
contains(github.event.comment.body, 'deploypr')
contains(github.event.comment.body, 'prdeploy') ||
contains(github.event.comment.body, 'deploypr')
)
&&
(
@ -28,7 +30,6 @@ jobs:
github.event.comment.user.login == 'sbplat' ||
github.event.comment.user.login == 'reecebrowne' ||
github.event.comment.user.login == 'DarioGii' ||
github.event.comment.user.login == 'EthanHealy01' ||
github.event.comment.user.login == 'ConnorYoh'
)
outputs:
@ -36,23 +37,18 @@ jobs:
pr_repository: ${{ steps.get-pr-info.outputs.repository }}
pr_ref: ${{ steps.get-pr-info.outputs.ref }}
comment_id: ${{ github.event.comment.id }}
disable_security: ${{ steps.check-security-flag.outputs.disable_security }}
enable_pro: ${{ steps.check-pro-flag.outputs.enable_pro }}
enable_enterprise: ${{ steps.check-pro-flag.outputs.enable_enterprise }}
enable_security: ${{ steps.check-security-flag.outputs.enable_security }}
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Checkout PR
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
if: github.actor != 'dependabot[bot]'
id: setup-bot
uses: ./.github/actions/setup-bot
continue-on-error: true
# Generate GitHub App token
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@ -88,7 +84,7 @@ jobs:
core.setOutput('repository', repository);
core.setOutput('ref', pr.head.ref);
- name: Check for security/login flag
id: check-security-flag
env:
@ -96,36 +92,17 @@ jobs:
run: |
if [[ "$COMMENT_BODY" == *"security"* ]] || [[ "$COMMENT_BODY" == *"login"* ]]; then
echo "Security flags detected in comment"
echo "disable_security=false" >> $GITHUB_OUTPUT
echo "enable_security=true" >> $GITHUB_OUTPUT
else
echo "No security flags detected in comment"
echo "disable_security=true" >> $GITHUB_OUTPUT
fi
- name: Check for pro flag
id: check-pro-flag
env:
COMMENT_BODY: ${{ github.event.comment.body }}
run: |
if [[ "$COMMENT_BODY" == *"pro"* ]] || [[ "$COMMENT_BODY" == *"premium"* ]]; then
echo "pro flags detected in comment"
echo "enable_pro=true" >> $GITHUB_OUTPUT
echo "enable_enterprise=false" >> $GITHUB_OUTPUT
elif [[ "$COMMENT_BODY" == *"enterprise"* ]]; then
echo "enterprise flags detected in comment"
echo "enable_enterprise=true" >> $GITHUB_OUTPUT
echo "enable_pro=true" >> $GITHUB_OUTPUT
else
echo "No pro or enterprise flags detected in comment"
echo "enable_pro=false" >> $GITHUB_OUTPUT
echo "enable_enterprise=false" >> $GITHUB_OUTPUT
echo "enable_security=false" >> $GITHUB_OUTPUT
fi
- name: Add 'in_progress' reaction to comment
id: add-eyes-reaction
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
github-token: ${{ steps.generate-token.outputs.token }}
script: |
console.log(`Adding eyes reaction to comment ID: ${context.payload.comment.id}`);
try {
@ -147,23 +124,18 @@ jobs:
needs: check-comment
runs-on: ubuntu-latest
permissions:
contents: read
issues: write
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Checkout PR
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
if: github.actor != 'dependabot[bot]'
id: setup-bot
uses: ./.github/actions/setup-bot
continue-on-error: true
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
@ -173,7 +145,7 @@ jobs:
with:
repository: ${{ needs.check-comment.outputs.pr_repository }}
ref: ${{ needs.check-comment.outputs.pr_ref }}
token: ${{ steps.setup-bot.outputs.token }}
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up JDK
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
@ -183,17 +155,23 @@ jobs:
- name: Run Gradle Command
run: |
if [ "${{ needs.check-comment.outputs.disable_security }}" == "true" ]; then
export DISABLE_ADDITIONAL_FEATURES=true
if [ "${{ needs.check-comment.outputs.enable_security }}" == "true" ]; then
export DOCKER_ENABLE_SECURITY=true
else
export DISABLE_ADDITIONAL_FEATURES=false
export DOCKER_ENABLE_SECURITY=false
fi
./gradlew clean build
env:
STIRLING_PDF_DESKTOP_UI: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Get version number
id: versionNumber
run: |
VERSION=$(grep "^version =" build.gradle | awk -F'"' '{print $2}')
echo "versionNumber=$VERSION" >> $GITHUB_OUTPUT
- name: Login to Docker Hub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
@ -202,7 +180,7 @@ jobs:
password: ${{ secrets.DOCKER_HUB_API }}
- name: Build and push PR-specific image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: .
file: ./Dockerfile
@ -221,31 +199,16 @@ jobs:
id: deploy
run: |
# Set security settings based on flags
if [ "${{ needs.check-comment.outputs.disable_security }}" == "false" ]; then
DISABLE_ADDITIONAL_FEATURES="false"
if [ "${{ needs.check-comment.outputs.enable_security }}" == "true" ]; then
DOCKER_SECURITY="true"
LOGIN_SECURITY="true"
SECURITY_STATUS="🔒 Security Enabled"
else
DISABLE_ADDITIONAL_FEATURES="true"
DOCKER_SECURITY="false"
LOGIN_SECURITY="false"
SECURITY_STATUS="Security Disabled"
fi
# Set pro/enterprise settings (enterprise implies pro)
if [ "${{ needs.check-comment.outputs.enable_enterprise }}" == "true" ]; then
PREMIUM_ENABLED="true"
PREMIUM_KEY="${{ secrets.ENTERPRISE_KEY }}"
PREMIUM_PROFEATURES_AUDIT_ENABLED="true"
elif [ "${{ needs.check-comment.outputs.enable_pro }}" == "true" ]; then
PREMIUM_ENABLED="true"
PREMIUM_KEY="${{ secrets.PREMIUM_KEY }}"
PREMIUM_PROFEATURES_AUDIT_ENABLED="true"
else
PREMIUM_ENABLED="false"
PREMIUM_KEY=""
PREMIUM_PROFEATURES_AUDIT_ENABLED="false"
fi
# First create the docker-compose content locally
cat > docker-compose.yml << EOF
version: '3.3'
@ -260,7 +223,7 @@ jobs:
- /stirling/PR-${{ needs.check-comment.outputs.pr_number }}/config:/configs:rw
- /stirling/PR-${{ needs.check-comment.outputs.pr_number }}/logs:/logs:rw
environment:
DISABLE_ADDITIONAL_FEATURES: "${DISABLE_ADDITIONAL_FEATURES}"
DOCKER_ENABLE_SECURITY: "${DOCKER_SECURITY}"
SECURITY_ENABLELOGIN: "${LOGIN_SECURITY}"
SYSTEM_DEFAULTLOCALE: en-GB
UI_APPNAME: "Stirling-PDF PR#${{ needs.check-comment.outputs.pr_number }}"
@ -269,9 +232,6 @@ jobs:
SYSTEM_MAXFILESIZE: "100"
METRICS_ENABLED: "true"
SYSTEM_GOOGLEVISIBILITY: "false"
PREMIUM_KEY: "${PREMIUM_KEY}"
PREMIUM_ENABLED: "${PREMIUM_ENABLED}"
PREMIUM_PROFEATURES_AUDIT_ENABLED: "${PREMIUM_PROFEATURES_AUDIT_ENABLED}"
restart: on-failure:5
EOF
@ -290,7 +250,7 @@ jobs:
docker-compose pull
docker-compose up -d
ENDSSH
# Set output for use in PR comment
echo "security_status=${SECURITY_STATUS}" >> $GITHUB_ENV
@ -298,7 +258,7 @@ jobs:
if: success()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
github-token: ${{ steps.generate-token.outputs.token }}
script: |
console.log(`Adding rocket reaction to comment ID: ${{ needs.check-comment.outputs.comment_id }}`);
try {
@ -314,26 +274,11 @@ jobs:
console.error(error);
}
// add label to PR
const prNumber = ${{ needs.check-comment.outputs.pr_number }};
try {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
labels: ['pr-deployed']
});
console.log(`Added 'pr-deployed' label to PR #${prNumber}`);
} catch (error) {
console.error(`Failed to add label to PR: ${error.message}`);
console.error(error);
}
- name: Add failure reaction to comment
if: failure()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
github-token: ${{ steps.generate-token.outputs.token }}
script: |
console.log(`Adding -1 reaction to comment ID: ${{ needs.check-comment.outputs.comment_id }}`);
try {
@ -353,7 +298,7 @@ jobs:
if: success()
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const { GITHUB_REPOSITORY } = process.env;
const [repoOwner, repoName] = GITHUB_REPOSITORY.split('/');
@ -373,11 +318,3 @@ jobs:
issue_number: prNumber,
body: commentBody
});
- name: Cleanup temporary files
if: always()
run: |
echo "Cleaning up temporary files..."
rm -f ../private.key docker-compose.yml
echo "Cleanup complete."
continue-on-error: true


@ -1,7 +1,7 @@
name: PR Deployment cleanup
on:
pull_request_target:
pull_request:
types: [opened, synchronize, reopened, closed]
permissions:
@ -13,99 +13,25 @@ env:
jobs:
cleanup:
if: github.event.action == 'closed'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
issues: write
if: github.event.action == 'closed'
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Checkout PR
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
if: github.actor != 'dependabot[bot]'
id: setup-bot
uses: ./.github/actions/setup-bot
continue-on-error: true
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Remove 'pr-deployed' label if present
id: remove-label-comment
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
script: |
const prNumber = ${{ github.event.pull_request.number }};
const owner = context.repo.owner;
const repo = context.repo.repo;
// Get all labels on the PR
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: prNumber
});
const hasLabel = labels.some(label => label.name === 'pr-deployed');
if (hasLabel) {
console.log("Label 'pr-deployed' found. Removing...");
await github.rest.issues.removeLabel({
owner,
repo,
issue_number: prNumber,
name: 'pr-deployed'
});
} else {
console.log("Label 'pr-deployed' not found. Nothing to do.");
}
// Find existing bot comments about the deployment
const { data: comments } = await github.rest.issues.listComments({
owner,
repo,
issue_number: prNumber
});
const deploymentComments = comments.filter(c =>
c.body?.includes("## 🚀 PR Test Deployment") &&
c.user?.type === "Bot"
);
if (deploymentComments.length > 0) {
for (const comment of deploymentComments) {
await github.rest.issues.deleteComment({
owner,
repo,
comment_id: comment.id
});
console.log(`Deleted deployment comment (ID: ${comment.id})`);
}
} else {
console.log("No matching deployment comments found.");
}
// Set flag if either label or comment was present
const hasDeploymentComment = deploymentComments.length > 0;
core.setOutput('present', (hasLabel || hasDeploymentComment) ? 'true' : 'false');
- name: Set up SSH
if: steps.remove-label-comment.outputs.present == 'true'
run: |
mkdir -p ~/.ssh/
echo "${{ secrets.VPS_SSH_KEY }}" > ../private.key
sudo chmod 600 ../private.key
- name: Cleanup PR deployment
if: steps.remove-label-comment.outputs.present == 'true'
id: cleanup
run: |
ssh -i ../private.key -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -T ${{ secrets.VPS_USERNAME }}@${{ secrets.VPS_HOST }} << 'ENDSSH'
@ -131,11 +57,3 @@ jobs:
echo "NO_CLEANUP_NEEDED"
fi
ENDSSH
- name: Cleanup temporary files
if: always()
run: |
echo "Cleaning up temporary files..."
rm -f ../private.key
echo "Cleanup complete."
continue-on-error: true


@ -1,228 +0,0 @@
name: AI - PR Title Review
on:
pull_request:
types: [opened, edited]
branches: [main]
permissions: # required for secure-repo hardening
contents: read
jobs:
ai-title-review:
permissions:
contents: read
pull-requests: write
models: read
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Configure Git to suppress detached HEAD warning
run: git config --global advice.detachedHead false
- name: Setup GitHub App Bot
if: github.actor != 'dependabot[bot]'
id: setup-bot
uses: ./.github/actions/setup-bot
continue-on-error: true
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Check if actor is repo developer
id: actor
run: |
if [[ "${{ github.actor }}" == *"[bot]" ]]; then
echo "PR opened by a bot skipping AI title review."
echo "is_repo_dev=false" >> $GITHUB_OUTPUT
exit 0
fi
if [ ! -f .github/config/repo_devs.json ]; then
echo "Error: .github/config/repo_devs.json not found" >&2
exit 1
fi
# Validate JSON and extract repo_devs
REPO_DEVS=$(jq -r '.repo_devs[]' .github/config/repo_devs.json 2>/dev/null || { echo "Error: Invalid JSON in repo_devs.json" >&2; exit 1; })
# Convert developer list into Bash array
mapfile -t DEVS_ARRAY <<< "$REPO_DEVS"
if [[ " ${DEVS_ARRAY[*]} " == *" ${{ github.actor }} "* ]]; then
echo "is_repo_dev=true" >> $GITHUB_OUTPUT
else
echo "is_repo_dev=false" >> $GITHUB_OUTPUT
fi
- name: Get PR diff
if: steps.actor.outputs.is_repo_dev == 'true'
id: get_diff
run: |
git fetch origin ${{ github.base_ref }}
git diff origin/${{ github.base_ref }}...HEAD | head -n 10000 | grep -vP '[\x00-\x08\x0B\x0C\x0E-\x1F\x7F\x{202E}\x{200B}]' > pr.diff
echo "diff<<EOF" >> $GITHUB_OUTPUT
cat pr.diff >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Check and sanitize PR title
if: steps.actor.outputs.is_repo_dev == 'true'
id: sanitize_pr_title
env:
PR_TITLE_RAW: ${{ github.event.pull_request.title }}
run: |
# Sanitize PR title: max 72 characters, only printable characters
PR_TITLE=$(echo "$PR_TITLE_RAW" | tr -d '\n\r' | head -c 72 | sed 's/[^[:print:]]//g')
if [[ ${#PR_TITLE} -lt 5 ]]; then
echo "PR title is too short. Must be at least 5 characters." >&2
fi
echo "pr_title=$PR_TITLE" >> $GITHUB_OUTPUT
- name: AI PR Title Analysis
if: steps.actor.outputs.is_repo_dev == 'true'
id: ai-title-analysis
uses: actions/ai-inference@d645f067d89ee1d5d736a5990e327e504d1c5a4a # v1.1.0
with:
model: openai/gpt-4o
system-prompt-file: ".github/config/system-prompt.txt"
prompt: |
Based on the following input data:
{
"diff": "${{ steps.get_diff.outputs.diff }}",
"pr_title": "${{ steps.sanitize_pr_title.outputs.pr_title }}"
}
Respond ONLY with valid JSON in the format:
{
"improved_rating": <0-10>,
"improved_ai_title_rating": <0-10>,
"improved_title": "<ai generated title>"
}
- name: Validate and set SCRIPT_OUTPUT
if: steps.actor.outputs.is_repo_dev == 'true'
run: |
cat <<EOF > ai_response.json
${{ steps.ai-title-analysis.outputs.response }}
EOF
# Validate JSON structure
jq -e '
(keys | sort) == ["improved_ai_title_rating", "improved_rating", "improved_title"] and
(.improved_rating | type == "number" and . >= 0 and . <= 10) and
(.improved_ai_title_rating | type == "number" and . >= 0 and . <= 10) and
(.improved_title | type == "string")
' ai_response.json
if [ $? -ne 0 ]; then
echo "Invalid AI response format" >&2
cat ai_response.json >&2
exit 1
fi
# Parse JSON fields
IMPROVED_RATING=$(jq -r '.improved_rating' ai_response.json)
IMPROVED_TITLE=$(jq -r '.improved_title' ai_response.json)
# Limit comment length to 1000 characters
COMMENT=$(cat <<EOF
## 🤖 AI PR Title Suggestion
**PR-Title Rating**: $IMPROVED_RATING/10
### ⬇️ Suggested Title (copy & paste):
\`\`\`
$IMPROVED_TITLE
\`\`\`
---
*Generated by GitHub Models AI*
EOF
)
echo "$COMMENT" > /tmp/ai-title-comment.md
# Log input and output to the GitHub Step Summary
echo "### 🤖 AI PR Title Analysis" >> $GITHUB_STEP_SUMMARY
echo "### Input PR Title" >> $GITHUB_STEP_SUMMARY
echo '```bash' >> $GITHUB_STEP_SUMMARY
echo "${{ steps.sanitize_pr_title.outputs.pr_title }}" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
echo '### AI Response (raw JSON)' >> $GITHUB_STEP_SUMMARY
echo '```json' >> $GITHUB_STEP_SUMMARY
cat ai_response.json >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
- name: Post comment on PR if needed
if: steps.actor.outputs.is_repo_dev == 'true'
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
continue-on-error: true
with:
github-token: ${{ steps.setup-bot.outputs.token }}
script: |
const fs = require('fs');
const body = fs.readFileSync('/tmp/ai-title-comment.md', 'utf8');
const { GITHUB_REPOSITORY } = process.env;
const [owner, repo] = GITHUB_REPOSITORY.split('/');
const issue_number = context.issue.number;
const ratingMatch = body.match(/\*\*PR-Title Rating\*\*: (\d+)\/10/);
const rating = ratingMatch ? parseInt(ratingMatch[1], 10) : null;
const expectedActor = "${{ steps.setup-bot.outputs.app-slug }}[bot]";
const comments = await github.rest.issues.listComments({ owner, repo, issue_number });
const existing = comments.data.find(c =>
c.user?.login === expectedActor &&
c.body.includes("## 🤖 AI PR Title Suggestion")
);
if (rating === null) {
console.log("No rating found in AI response skipping.");
return;
}
if (rating <= 5) {
if (existing) {
await github.rest.issues.updateComment({
owner, repo,
comment_id: existing.id,
body
});
console.log("Updated existing suggestion comment.");
} else {
await github.rest.issues.createComment({
owner, repo, issue_number,
body
});
console.log("Created new suggestion comment.");
}
} else {
const praise = `## 🤖 AI PR Title Suggestion\n\nGreat job! The current PR title is clear and well-structured.\n\n✅ No suggestions needed.\n\n---\n*Generated by GitHub Models AI*`;
if (existing) {
await github.rest.issues.updateComment({
owner, repo,
comment_id: existing.id,
body: praise
});
console.log("Replaced suggestion with praise.");
} else {
console.log("Rating > 5 and no existing comment skipping comment.");
}
}
- name: is not repo dev
if: steps.actor.outputs.is_repo_dev != 'true'
run: |
exit 0 # Skip the AI title review for non-repo developers
- name: Clean up
if: always()
run: |
rm -f pr.diff ai_response.json /tmp/ai-title-comment.md
echo "Cleaned up temporary files."
continue-on-error: true # Ensure cleanup runs even if previous steps fail

.github/workflows/auto-labeler.yml (vendored, new file, 27 added lines)

@ -0,0 +1,27 @@
name: "Pull Request Labeler"
on:
pull_request_target:
types: [opened, synchronize]
permissions:
contents: read
jobs:
labeler:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Apply Labels
uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
configuration-path: .github/labeler-config.yml
sync-labels: true


@ -1,35 +0,0 @@
name: "Auto Pull Request Labeler V2"
on:
pull_request_target:
types: [opened, synchronize]
permissions:
contents: read
jobs:
labeler:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: srvaroa/labeler@0a20eccb8c94a1ee0bed5f16859aece1c45c3e55 # v1.13.0
with:
config_path: .github/labeler-config-srvaroa.yml
use_local_config: false
fail_on_error: true
env:
GITHUB_TOKEN: "${{ steps.setup-bot.outputs.token }}"


@ -1,46 +1,15 @@
name: Build and Test Workflow
name: Build repo
on:
workflow_dispatch:
# push:
# branches: ["main"]
push:
branches: ["main"]
pull_request:
branches: ["main"]
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
files-changed:
name: detect what files changed
runs-on: ubuntu-latest
timeout-minutes: 3
# Map a step output to a job output
outputs:
build: ${{ steps.changes.outputs.build }}
app: ${{ steps.changes.outputs.app }}
project: ${{ steps.changes.outputs.project }}
openapi: ${{ steps.changes.outputs.openapi }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Check for file changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: changes
with:
filters: ".github/config/.files.yaml"
build:
runs-on: ubuntu-latest
@ -52,11 +21,10 @@ jobs:
fail-fast: false
matrix:
jdk-version: [17, 21]
spring-security: [true, false]
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -69,98 +37,32 @@ jobs:
java-version: ${{ matrix.jdk-version }}
distribution: "temurin"
- name: Setup Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
with:
gradle-version: 8.14
- name: Build with Gradle and spring security ${{ matrix.spring-security }}
- name: Build with Gradle and no spring security
run: ./gradlew clean build
env:
DISABLE_ADDITIONAL_FEATURES: ${{ matrix.spring-security }}
DOCKER_ENABLE_SECURITY: false
- name: Check Test Reports Exist
id: check-reports
if: always()
run: |
declare -a dirs=(
"app/core/build/reports/tests/"
"app/core/build/test-results/"
"app/common/build/reports/tests/"
"app/common/build/test-results/"
"app/proprietary/build/reports/tests/"
"app/proprietary/build/test-results/"
)
missing_reports=()
for dir in "${dirs[@]}"; do
if [ ! -d "$dir" ]; then
missing_reports+=("$dir")
fi
done
if [ ${#missing_reports[@]} -gt 0 ]; then
echo "ERROR: The following required test report directories are missing:"
printf '%s\n' "${missing_reports[@]}"
exit 1
fi
echo "All required test report directories are present"
- name: Build with Gradle and with spring security
run: ./gradlew clean build
env:
DOCKER_ENABLE_SECURITY: true
- name: Upload Test Reports
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: test-reports-jdk-${{ matrix.jdk-version }}-spring-security-${{ matrix.spring-security }}
name: test-reports-jdk-${{ matrix.jdk-version }}
path: |
app/core/build/reports/tests/
app/core/build/test-results/
app/core/build/reports/problems/
app/common/build/reports/tests/
app/common/build/test-results/
app/common/build/reports/problems/
app/proprietary/build/reports/tests/
app/proprietary/build/test-results/
app/proprietary/build/reports/problems/
build/reports/tests/
build/test-results/
build/reports/problems/
retention-days: 3
if-no-files-found: warn
check-generateOpenApiDocs:
if: needs.files-changed.outputs.openapi == 'true'
needs: [files-changed, build]
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up JDK 17
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: "17"
distribution: "temurin"
- name: Setup Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- name: Generate OpenAPI documentation
run: ./gradlew :stirling-pdf:generateOpenApiDocs
- name: Upload OpenAPI Documentation
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: openapi-docs
path: ./SwaggerDoc.json
check-licence:
if: needs.files-changed.outputs.build == 'true'
needs: [files-changed, build]
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -171,7 +73,7 @@ jobs:
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: "17"
distribution: "temurin"
distribution: "adopt"
- name: check the licenses for compatibility
run: ./gradlew clean checkLicense
@ -186,8 +88,6 @@ jobs:
retention-days: 3
docker-compose-tests:
if: needs.files-changed.outputs.project == 'true'
needs: files-changed
# if: github.event_name == 'push' && github.ref == 'refs/heads/main' ||
# (github.event_name == 'pull_request' &&
# contains(github.event.pull_request.labels.*.name, 'licenses') == false &&
@ -206,7 +106,7 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -217,14 +117,14 @@ jobs:
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: "17"
distribution: "temurin"
distribution: "adopt"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Install Docker Compose
run: |
sudo curl -SL "https://github.com/docker/compose/releases/download/v2.37.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo curl -SL "https://github.com/docker/compose/releases/download/v2.32.4/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
- name: Set up Python
@ -232,7 +132,6 @@ jobs:
with:
python-version: "3.12"
cache: 'pip' # caching pip dependencies
cache-dependency-path: ./testing/cucumber/requirements.txt
- name: Pip requirements
run: |
@ -244,69 +143,3 @@ jobs:
chmod +x ./testing/test.sh
chmod +x ./testing/test_disabledEndpoints.sh
./testing/test.sh
test-build-docker-images:
if: github.event_name == 'pull_request' && needs.files-changed.outputs.project == 'true'
needs: [files-changed, build, check-generateOpenApiDocs, check-licence]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
docker-rev: ["Dockerfile", "Dockerfile.ultra-lite", "Dockerfile.fat"]
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
with:
egress-policy: audit
- name: Checkout Repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up JDK 17
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: "17"
distribution: "temurin"
- name: Set up Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
with:
gradle-version: 8.14
- name: Build application
run: ./gradlew clean build
env:
DISABLE_ADDITIONAL_FEATURES: true
STIRLING_PDF_DESKTOP_UI: false
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build ${{ matrix.docker-rev }}
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./${{ matrix.docker-rev }}
push: false
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: linux/amd64,linux/arm64/v8
provenance: true
sbom: true
- name: Upload Reports
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: reports-docker-${{ matrix.docker-rev }}
path: |
build/reports/tests/
build/test-results/
build/reports/problems/
retention-days: 3
if-no-files-found: warn


@ -4,19 +4,7 @@ on:
pull_request_target:
types: [opened, synchronize, reopened]
paths:
- "app/core/src/main/resources/messages_*.properties"
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
- "src/main/resources/messages_*.properties"
permissions:
contents: read # Allow read access to repository content
@ -27,28 +15,25 @@ jobs:
runs-on: ubuntu-latest
permissions:
issues: write # Allow posting comments on issues/PRs
pull-requests: write # Allow writing to pull requests
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Checkout main branch first
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
python-version: "3.12"
- name: Get PR data
id: get-pr-data
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
script: |
const prNumber = context.payload.pull_request.number;
const repoOwner = context.payload.repository.owner.login;
@ -69,30 +54,16 @@ jobs:
- name: Fetch PR changed files
id: fetch-pr-changes
env:
GH_TOKEN: ${{ steps.setup-bot.outputs.token }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "Fetching PR changed files..."
echo "Getting list of changed files from PR..."
# Check if PR number exists
if [ -z "${{ steps.get-pr-data.outputs.pr_number }}" ]; then
echo "Error: PR number is empty"
exit 1
fi
# Get changed files and filter for properties files, handle case where no matches are found
gh pr view ${{ steps.get-pr-data.outputs.pr_number }} --json files -q ".files[].path" | grep -E '^app/core/src/main/resources/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$' > changed_files.txt || echo "No matching properties files found in PR"
# Check if any files were found
if [ ! -s changed_files.txt ]; then
echo "No properties files changed in this PR"
echo "Workflow will exit early as no relevant files to check"
exit 0
fi
echo "Found $(wc -l < changed_files.txt) matching properties files"
gh pr view ${{ steps.get-pr-data.outputs.pr_number }} --json files -q ".files[].path" | grep -E '^src/main/resources/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$' > changed_files.txt # Filter only matching property files
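To make the filter above concrete, here is a minimal sketch of which paths the regex keeps; the sample filenames are hypothetical, and the path prefix follows the `app/core/...` layout used on `main`:

```bash
# Hypothetical sketch: run a few candidate paths through the same pattern
# used in the step above and print only those that survive the filter.
printf '%s\n' \
  "app/core/src/main/resources/messages_en_GB.properties" \
  "app/core/src/main/resources/messages_pt_BR.properties" \
  "app/core/src/main/resources/messages.properties" \
  "docs/README.md" |
  grep -E '^app/core/src/main/resources/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$' ||
  echo "No matching properties files found"
# Only the first two paths match: a locale suffix such as en_GB or pt_BR is required.
```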
- name: Determine reference file test
id: determine-file
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
script: |
const fs = require("fs");
const path = require("path");
@ -127,11 +98,8 @@ jobs:
// Filter for relevant files based on the PR changes
const changedFiles = files
.filter(file =>
file.status !== "removed" &&
/^app\/core\/src\/main\/resources\/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$/.test(file.filename)
)
.map(file => file.filename);
.map(file => file.filename)
.filter(file => /^src\/main\/resources\/messages_[a-zA-Z_]{2}_[a-zA-Z_]{2,7}\.properties$/.test(file));
console.log("Changed files:", changedFiles);
@ -169,12 +137,12 @@ jobs:
// Determine reference file
let referenceFilePath;
if (changedFiles.includes("app/core/src/main/resources/messages_en_GB.properties")) {
if (changedFiles.includes("src/main/resources/messages_en_GB.properties")) {
console.log("Using PR branch reference file.");
const { data: fileContent } = await github.rest.repos.getContent({
owner: prRepoOwner,
repo: prRepoName,
path: "app/core/src/main/resources/messages_en_GB.properties",
path: "src/main/resources/messages_en_GB.properties",
ref: branch,
});
@ -186,7 +154,7 @@ jobs:
const { data: fileContent } = await github.rest.repos.getContent({
owner: repoOwner,
repo: repoName,
path: "app/core/src/main/resources/messages_en_GB.properties",
path: "src/main/resources/messages_en_GB.properties",
ref: "main",
});
@ -236,7 +204,6 @@ jobs:
if: env.SCRIPT_OUTPUT != ''
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ steps.setup-bot.outputs.token }}
script: |
const { GITHUB_REPOSITORY, SCRIPT_OUTPUT } = process.env;
const [repoOwner, repoName] = GITHUB_REPOSITORY.split('/');
@ -252,7 +219,7 @@ jobs:
const comment = comments.data.find(c => c.body.includes("## 🚀 Translation Verification Summary"));
// Only update or create comments by the action user
const expectedActor = "${{ steps.setup-bot.outputs.app-slug }}[bot]";
const expectedActor = "github-actions[bot]";
if (comment && comment.user.login === expectedActor) {
// Update existing comment
@ -281,12 +248,3 @@ jobs:
run: |
echo "Failing the job because errors were detected."
exit 1
- name: Cleanup temporary files
if: always()
run: |
echo "Cleaning up temporary files..."
rm -rf pr-branch
rm -f pr-branch-messages_en_GB.properties main-branch-messages_en_GB.properties changed_files.txt result.txt
echo "Cleanup complete."
continue-on-error: true # Ensure cleanup runs even if previous steps fail

View File

@ -17,11 +17,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: "Checkout Repository"
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: "Dependency Review"
uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1
uses: actions/dependency-review-action@ce3cf9537a52e8119d91fd484ab5b8a807627bf8 # v4.6.0

View File

@ -7,18 +7,6 @@ on:
paths:
- "build.gradle"
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
@ -28,52 +16,54 @@ jobs:
permissions:
contents: write
pull-requests: write
repository-projects: write # Required for enabling automerge
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Check out code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Check out code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up JDK 17
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
java-version: "17"
distribution: "temurin"
distribution: "adopt"
- name: Setup Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- name: Check licenses for compatibility
- name: check the licenses for compatibility
run: ./gradlew clean checkLicense
- name: Upload artifact on failure
- name: FAILED - check the licenses for compatibility
if: failure()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: dependencies-without-allowed-license.json
path: build/reports/dependency-license/dependencies-without-allowed-license.json
path: |
build/reports/dependency-license/dependencies-without-allowed-license.json
retention-days: 3
- name: Move and rename license file
- name: Move and Rename License File
run: |
mv build/reports/dependency-license/index.json app/core/src/main/resources/static/3rdPartyLicenses.json
mv build/reports/dependency-license/index.json src/main/resources/static/3rdPartyLicenses.json
- name: Commit changes
- name: Set up git config
run: |
git add app/core/src/main/resources/static/3rdPartyLicenses.json
git config --global user.name "stirlingbot[bot]"
git config --global user.email "1113334+stirlingbot[bot]@users.noreply.github.com"
- name: Run git add
run: |
git add src/main/resources/static/3rdPartyLicenses.json
git diff --staged --quiet || echo "CHANGES_DETECTED=true" >> $GITHUB_ENV
- name: Create Pull Request
@ -81,16 +71,16 @@ jobs:
if: env.CHANGES_DETECTED == 'true'
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.setup-bot.outputs.token }}
token: ${{ steps.generate-token.outputs.token }}
commit-message: "Update 3rd Party Licenses"
committer: ${{ steps.setup-bot.outputs.committer }}
author: ${{ steps.setup-bot.outputs.committer }}
committer: "stirlingbot[bot] <1113334+stirlingbot[bot]@users.noreply.github.com>"
author: "stirlingbot[bot] <1113334+stirlingbot[bot]@users.noreply.github.com>"
signoff: true
branch: update-3rd-party-licenses
title: "Update 3rd Party Licenses"
body: |
Auto-generated by ${{ steps.setup-bot.outputs.app-slug }}[bot]
labels: Licenses,github-actions
Auto-generated by StirlingBot
labels: licenses,github-actions
draft: false
delete-branch: true
sign-commits: true
@ -99,4 +89,4 @@ jobs:
if: steps.cpr.outputs.pull-request-operation == 'created'
run: gh pr merge --squash --auto "${{ steps.cpr.outputs.pull-request-number }}"
env:
GH_TOKEN: ${{ steps.setup-bot.outputs.token }}
GH_TOKEN: ${{ steps.generate-token.outputs.token }}

View File

@ -15,7 +15,7 @@ jobs:
issues: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit

View File

@ -21,46 +21,42 @@ jobs:
versionMac: ${{ steps.versionNumberMac.outputs.versionNumberMac }}
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up JDK
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'temurin'
java-version: '21'
# ✅ Get version from Gradle
# Get version number
- name: Get version number
id: versionNumber
run: |
VERSION=$(./gradlew printVersion --quiet | tail -1)
VERSION=$(grep "^version =" build.gradle | awk -F'"' '{print $2}')
echo "versionNumber=$VERSION" >> $GITHUB_OUTPUT
# ✅ Get Mac-specific version from Gradle
- name: Get version number mac
id: versionNumberMac
run: |
VERSION_MAC=$(./gradlew printMacVersion --quiet | tail -1)
echo "versionNumberMac=$VERSION_MAC" >> $GITHUB_OUTPUT
VERSION=$(grep "^version =" build.gradle | awk -F'"' '{print $2}')
CURRENT_YEAR=$(date +'%Y')
IFS='.' read -r -a VERSION_PARTS <<< "$VERSION"
MAC_VERSION="$CURRENT_YEAR.${VERSION_PARTS[1]:-0}.${VERSION_PARTS[2]:-0}"
echo "versionNumberMac=$MAC_VERSION" >> $GITHUB_OUTPUT
build-portable:
needs: read_versions
runs-on: ubuntu-latest
strategy:
matrix:
disable_security: [true, false]
enable_security: [true, false]
include:
- disable_security: false
- enable_security: true
file_suffix: "-with-login"
- disable_security: true
- enable_security: false
file_suffix: ""
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -72,21 +68,21 @@ jobs:
java-version: "21"
distribution: "temurin"
- uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
with:
gradle-version: 8.14
- name: Generate jar (Disable Security=${{ matrix.disable_security }})
- name: Generate jar (With Security=${{ matrix.enable_security }})
run: ./gradlew clean createExe
env:
DISABLE_ADDITIONAL_FEATURES: ${{ matrix.disable_security }}
DOCKER_ENABLE_SECURITY: ${{ matrix.enable_security }}
STIRLING_PDF_DESKTOP_UI: false
- name: Rename binaries
run: |
mkdir ./binaries
mv ./build/launch4j/Stirling-PDF.exe ./binaries/win-Stirling-PDF-portable-Server${{ matrix.file_suffix }}.exe
mv ./app/core/build/libs/stirling-pdf-${{ needs.read_versions.outputs.version }}.jar ./binaries/Stirling-PDF${{ matrix.file_suffix }}.jar
mv ./build/libs/Stirling-PDF-${{ needs.read_versions.outputs.version }}.jar ./binaries/Stirling-PDF${{ matrix.file_suffix }}.jar
- name: Upload build artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@ -102,15 +98,15 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
disable_security: [true, false]
enable_security: [true, false]
include:
- disable_security: false
- enable_security: true
file_suffix: "with-login-"
- disable_security: true
- enable_security: false
file_suffix: ""
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -148,7 +144,7 @@ jobs:
contents: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -160,7 +156,7 @@ jobs:
java-version: "21"
distribution: "temurin"
- uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
with:
gradle-version: 8.14
@ -175,7 +171,7 @@ jobs:
- name: Build Installer
run: ./gradlew build jpackage -x test --info
env:
DISABLE_ADDITIONAL_FEATURES: true
DOCKER_ENABLE_SECURITY: false
STIRLING_PDF_DESKTOP_UI: true
BROWSER_OPEN: true
@ -238,7 +234,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -252,7 +248,7 @@ jobs:
- name: Install Cosign
if: matrix.os == 'windows-latest'
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
- name: Generate key pair
if: matrix.os == 'windows-latest'
@ -301,7 +297,7 @@ jobs:
contents: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -310,7 +306,7 @@ jobs:
- name: Display structure of downloaded files
run: ls -R
- name: Upload binaries, attestations and signatures to Release and create GitHub Release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
uses: softprops/action-gh-release@01570a1f39cb168c169c802c3bceb9e93fb10974 # v2.1.0
with:
tag_name: v${{ needs.read_versions.outputs.version }}
generate_release_notes: true

View File

@ -16,54 +16,62 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get GitHub App User ID
id: get-user-id
run: echo "user-id=$(gh api "/users/${{ steps.generate-token.outputs.app-slug }}[bot]" --jq .id)" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- id: committer
run: |
echo "string=${{ steps.generate-token.outputs.app-slug }}[bot] <${{ steps.get-user-id.outputs.user-id }}+${{ steps.generate-token.outputs.app-slug }}[bot]@users.noreply.github.com>" >> "$GITHUB_OUTPUT"
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: 3.12
cache: 'pip' # caching pip dependencies
cache-dependency-path: ./.github/scripts/requirements_pre_commit.txt
- name: Run Pre-Commit Hooks
run: |
pip install --require-hashes -r ./.github/scripts/requirements_pre_commit.txt
- run: pre-commit run --all-files -c .pre-commit-config.yaml
continue-on-error: true
- name: Set up git config
run: |
git config --global user.name ${{ steps.generate-token.outputs.app-slug }}[bot]
git config --global user.email "${{ steps.get-user-id.outputs.user-id }}+${{ steps.generate-token.outputs.app-slug }}[bot]@users.noreply.github.com"
- name: git add
run: |
git add .
git diff --staged --quiet || echo "CHANGES_DETECTED=true" >> $GITHUB_ENV
- name: Create Pull Request
if: env.CHANGES_DETECTED == 'true'
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.setup-bot.outputs.token }}
token: ${{ steps.generate-token.outputs.token }}
commit-message: ":file_folder: pre-commit"
committer: ${{ steps.setup-bot.outputs.committer }}
author: ${{ steps.setup-bot.outputs.committer }}
committer: ${{ steps.committer.outputs.string }}
author: ${{ steps.committer.outputs.string }}
signoff: true
branch: pre-commit
title: "🤖 format everything with pre-commit by ${{ steps.setup-bot.outputs.app-slug }}"
title: "🤖 format everything with pre-commit by <${{ steps.generate-token.outputs.app-slug }}>"
body: |
Auto-generated by [create-pull-request][1] with **${{ steps.setup-bot.outputs.app-slug }}**
Auto-generated by [create-pull-request][1] with **${{ steps.generate-token.outputs.app-slug }}**
[1]: https://github.com/peter-evans/create-pull-request
draft: false

View File

@ -7,18 +7,6 @@ on:
- master
- main
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
@ -30,7 +18,7 @@ jobs:
id-token: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -42,25 +30,25 @@ jobs:
java-version: "17"
distribution: "temurin"
- uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
with:
gradle-version: 8.14
- name: Run Gradle Command
run: ./gradlew clean build
env:
DISABLE_ADDITIONAL_FEATURES: true
DOCKER_ENABLE_SECURITY: false
STIRLING_PDF_DESKTOP_UI: false
- name: Install cosign
if: github.ref == 'refs/heads/master'
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
with:
cosign-release: "v2.4.1"
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Get version number
id: versionNumber
@ -89,7 +77,6 @@ jobs:
- name: Generate tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
@ -99,11 +86,11 @@ jobs:
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }},enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=alpha,enable=${{ github.ref == 'refs/heads/main' }}
- name: Build and push main Dockerfile
id: build-push-regular
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
if: github.ref != 'refs/heads/main'
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
@ -148,7 +135,7 @@ jobs:
- name: Build and push Dockerfile-ultra-lite
id: build-push-lite
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
if: github.ref != 'refs/heads/main'
with:
context: .
@ -166,6 +153,7 @@ jobs:
- name: Generate tags fat
id: meta3
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
@ -175,11 +163,11 @@ jobs:
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }}-fat,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest-fat,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=alpha,enable=${{ github.ref == 'refs/heads/main' }}
- name: Build and push main Dockerfile fat
id: build-push-fat
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
if: github.ref != 'refs/heads/main'
with:
builder: ${{ steps.buildx.outputs.name }}
context: .

View File

@ -13,17 +13,17 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
disable_security: [true, false]
enable_security: [true, false]
include:
- disable_security: false
- enable_security: true
file_suffix: "-with-login"
- disable_security: true
- enable_security: false
file_suffix: ""
outputs:
version: ${{ steps.versionNumber.outputs.versionNumber }}
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -35,14 +35,14 @@ jobs:
java-version: "17"
distribution: "temurin"
- uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
with:
gradle-version: 8.14
- name: Generate jar (Disable Security=${{ matrix.disable_security }})
- name: Generate jar (With Security=${{ matrix.enable_security }})
run: ./gradlew clean createExe
env:
DISABLE_ADDITIONAL_FEATURES: ${{ matrix.disable_security }}
DOCKER_ENABLE_SECURITY: ${{ matrix.enable_security }}
STIRLING_PDF_DESKTOP_UI: false
- name: Get version number
@ -75,15 +75,15 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
disable_security: [true, false]
enable_security: [true, false]
include:
- disable_security: false
- enable_security: true
file_suffix: "-with-login"
- disable_security: true
- enable_security: false
file_suffix: ""
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -95,7 +95,7 @@ jobs:
run: ls -R
- name: Install Cosign
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
- name: Generate key pair
run: cosign generate-key-pair
@ -153,15 +153,15 @@ jobs:
contents: write
strategy:
matrix:
disable_security: [true, false]
enable_security: [true, false]
include:
- disable_security: false
- enable_security: true
file_suffix: "-with-login"
- disable_security: true
- enable_security: false
file_suffix: ""
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -171,7 +171,7 @@ jobs:
name: signed${{ matrix.file_suffix }}
- name: Upload binaries, attestations and signatures to Release and create GitHub Release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
uses: softprops/action-gh-release@01570a1f39cb168c169c802c3bceb9e93fb10974 # v2.1.0
with:
tag_name: v${{ needs.build.outputs.version }}
generate_release_notes: true

View File

@ -34,7 +34,7 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -44,7 +44,7 @@ jobs:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2
uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1
with:
results_file: results.sarif
results_format: sarif
@ -74,6 +74,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@d6bbdef45e766d081b84a2def353b0055f728d3e # v3.29.3
uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
with:
sarif_file: results.sarif

View File

@ -9,18 +9,6 @@ on:
- main
workflow_dispatch:
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
pull-requests: read
actions: read
@ -30,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -39,13 +27,13 @@ jobs:
fetch-depth: 0
- name: Setup Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- name: Build and analyze with Gradle
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
DISABLE_ADDITIONAL_FEATURES: false
DOCKER_ENABLE_SECURITY: true
STIRLING_PDF_DESKTOP_UI: true
run: |
./gradlew clean build sonar \

View File

@ -16,7 +16,7 @@ jobs:
pull-requests: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit

View File

@ -6,18 +6,6 @@ on:
branches:
- master
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
@ -26,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -38,10 +26,10 @@ jobs:
java-version: "17"
distribution: "temurin"
- uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
- uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1
- name: Generate Swagger documentation
run: ./gradlew :stirling-pdf:generateOpenApiDocs
run: ./gradlew generateOpenApiDocs
- name: Upload Swagger Documentation to SwaggerHub
run: ./gradlew swaggerhubUpload

View File

@ -8,57 +8,87 @@ on:
paths:
- "build.gradle"
- "README.md"
- "app/core/src/main/resources/messages_*.properties"
- "app/core/src/main/resources/static/3rdPartyLicenses.json"
- "src/main/resources/messages_*.properties"
- "src/main/resources/static/3rdPartyLicenses.json"
- "scripts/ignore_translation.toml"
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
sync-files:
read_bot_entries:
runs-on: ubuntu-latest
outputs:
userName: ${{ steps.get-user-id.outputs.user_name }}
userEmail: ${{ steps.get-user-id.outputs.user_email }}
committer: ${{ steps.committer.outputs.committer }}
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup GitHub App Bot
id: setup-bot
uses: ./.github/actions/setup-bot
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ secrets.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- name: Get GitHub App User ID
id: get-user-id
run: |
USER_NAME="${{ steps.generate-token.outputs.app-slug }}[bot]"
USER_ID=$(gh api "/users/$USER_NAME" --jq .id)
USER_EMAIL="$USER_ID+$USER_NAME@users.noreply.github.com"
echo "user_name=$USER_NAME" >> "$GITHUB_OUTPUT"
echo "user_email=$USER_EMAIL" >> "$GITHUB_OUTPUT"
echo "user-id=$USER_ID" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- id: committer
run: |
COMMITTER="${{ steps.get-user-id.outputs.user_name }} <${{ steps.get-user-id.outputs.user_email }}>"
echo "committer=$COMMITTER" >> "$GITHUB_OUTPUT"
sync-files:
needs: ["read_bot_entries"]
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
with:
app-id: ${{ vars.GH_APP_ID }}
private-key: ${{ secrets.GH_APP_PRIVATE_KEY }}
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.12"
cache: "pip" # caching pip dependencies
cache: 'pip' # caching pip dependencies
- name: Sync translation property files
run: |
python .github/scripts/check_language_properties.py --reference-file "app/core/src/main/resources/messages_en_GB.properties" --branch main
python .github/scripts/check_language_properties.py --reference-file "src/main/resources/messages_en_GB.properties" --branch main
- name: Commit translation files
- name: Set up git config
run: |
git add app/core/src/main/resources/messages_*.properties
git diff --staged --quiet || git commit -m ":memo: Sync translation files" || echo "No changes detected"
git config --global user.name ${{ needs.read_bot_entries.outputs.userName }}
git config --global user.email ${{ needs.read_bot_entries.outputs.userEmail }}
- name: Run git add
run: |
git add src/main/resources/messages_*.properties
git diff --staged --quiet || git commit -m ":memo: Sync translation files" || echo "no changes"
- name: Install dependencies
run: pip install --require-hashes -r ./.github/scripts/requirements_sync_readme.txt
@ -69,17 +99,16 @@ jobs:
- name: Run git add
run: |
git add README.md scripts/ignore_translation.toml
git diff --staged --quiet || git commit -m ":memo: Sync README.md & scripts/ignore_translation.toml" || echo "No changes detected"
git add README.md
git diff --staged --quiet || git commit -m ":memo: Sync README.md" || echo "no changes"
- name: Create Pull Request
if: always()
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ steps.setup-bot.outputs.token }}
token: ${{ steps.generate-token.outputs.token }}
commit-message: Update files
committer: ${{ steps.setup-bot.outputs.committer }}
author: ${{ steps.setup-bot.outputs.committer }}
committer: ${{ needs.read_bot_entries.outputs.committer }}
author: ${{ needs.read_bot_entries.outputs.committer }}
signoff: true
branch: sync_readme
title: ":globe_with_meridians: Sync Translations + Update README Progress Table"
@ -113,4 +142,4 @@ jobs:
sign-commits: true
add-paths: |
README.md
app/core/src/main/resources/messages_*.properties
src/main/resources/messages_*.properties

View File

@ -4,18 +4,6 @@ on:
push:
branches: ["master", "UITest", "testdriver"]
# cancel in-progress jobs if a new job is triggered
# This is useful to avoid running multiple builds for the same branch if a new commit is pushed
# or a pull request is updated.
# It helps to save resources and time by ensuring that only the latest commit is built and tested
# This is particularly useful for long-running jobs that may take a while to complete.
# The `group` is set to a combination of the workflow name, event name, and branch name.
# This ensures that jobs are grouped by the workflow and branch, allowing for cancellation of
# in-progress jobs when a new commit is pushed to the same branch or a new pull request is opened.
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref_name || github.ref }}
cancel-in-progress: true
permissions:
contents: read
@ -24,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
@ -37,18 +25,13 @@ jobs:
java-version: '17'
distribution: 'temurin'
- name: Setup Gradle
uses: gradle/actions/setup-gradle@ac638b010cf58a27ee6c972d7336334ccaf61c96 # v4.4.1
with:
gradle-version: 8.14
- name: Build with Gradle
run: ./gradlew clean build
env:
DISABLE_ADDITIONAL_FEATURES: true
DOCKER_ENABLE_SECURITY: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Get version number
id: versionNumber
@ -63,7 +46,7 @@ jobs:
password: ${{ secrets.DOCKER_HUB_API }}
- name: Build and push test image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: .
file: ./Dockerfile
@ -93,7 +76,7 @@ jobs:
- /stirling/test-${{ github.sha }}/config:/configs:rw
- /stirling/test-${{ github.sha }}/logs:/logs:rw
environment:
DISABLE_ADDITIONAL_FEATURES: "true"
DOCKER_ENABLE_SECURITY: "false"
SECURITY_ENABLELOGIN: "false"
SYSTEM_DEFAULTLOCALE: en-GB
UI_APPNAME: "Stirling-PDF Test"
@ -122,17 +105,12 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
cache: 'npm'
- name: Run TestDriver.ai
uses: testdriverai/action@f0d0f45fdd684db628baa843fe9313f3ca3a8aa8 #1.1.3
with:
@ -156,7 +134,7 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0
uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
with:
egress-policy: audit

10 .gitignore vendored
View File

@ -13,7 +13,6 @@ local.properties
.recommenders
.classpath
.project
*.local.json
version.properties
#### Stirling-PDF Files ###
@ -124,13 +123,7 @@ SwaggerDoc.json
*.tar.gz
*.rar
*.db
build
app/core/build
app/common/build
app/proprietary/build
common/build
proprietary/build
stirling-pdf/build
/build
# Byte-compiled / optimized / DLL files
__pycache__/
@ -200,3 +193,4 @@ id_ed25519.pub
# node_modules
node_modules/
*.mjs

View File

@ -1,28 +1,28 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.0
rev: v0.11.6
hooks:
- id: ruff
args:
- --fix
- --line-length=127
files: ^((\.github/scripts|scripts|app/core/src/main/resources/static/python)/.+)?[^/]+\.py$
files: ^((\.github/scripts|scripts)/.+)?[^/]+\.py$
exclude: (split_photos.py)
- id: ruff-format
files: ^((\.github/scripts|scripts|app/core/src/main/resources/static/python)/.+)?[^/]+\.py$
files: ^((\.github/scripts|scripts)/.+)?[^/]+\.py$
exclude: (split_photos.py)
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
args:
- --ignore-words-list=thirdParty,tabEl,tabEls
- --ignore-words-list=
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
files: \.(html|css|js|py|md)$
exclude: (.vscode|.devcontainer|app/core/src/main/resources|app/proprietary/src/main/resources|Dockerfile|.*/pdfjs.*|.*/thirdParty.*|bootstrap.*|.*\.min\..*|.*diff\.js)
exclude: (.vscode|.devcontainer|src/main/resources|Dockerfile|.*/pdfjs.*|.*/thirdParty.*|bootstrap.*|.*\.min\..*|.*diff\.js)
- repo: https://github.com/gitleaks/gitleaks
rev: v8.27.2
rev: v8.24.3
hooks:
- id: gitleaks
- repo: https://github.com/pre-commit/pre-commit-hooks
@ -34,13 +34,3 @@ repos:
- id: trailing-whitespace
files: ^.*(\.js|\.java|\.py|\.yml)$
exclude: ^(.*/pdfjs.*|.*/thirdParty.*|bootstrap.*|.*\.min\..*|.*diff\.js|\.github/workflows/.*$)
# - repo: https://github.com/thibaudcolas/pre-commit-stylelint
# rev: v16.21.1
# hooks:
# - id: stylelint
# additional_dependencies:
# - stylelint@16.21.1
# - stylelint-config-standard@38.0.0
# - "@stylistic/stylelint-plugin@3.1.3"
# files: \.(css)$
# args: [--fix]

View File

@ -15,9 +15,6 @@
"ms-azuretools.vscode-docker", // Docker extension for Visual Studio Code
"GitHub.copilot", // GitHub Copilot AI pair programmer for Visual Studio Code
"GitHub.vscode-pull-request-github", // GitHub Pull Requests extension for Visual Studio Code
"charliermarsh.ruff", // Ruff code formatter for Python to follow the Ruff Style Guide
"yzhang.markdown-all-in-one", // Markdown All-in-One extension for enhanced Markdown editing
"stylelint.vscode-stylelint", // Stylelint extension for CSS and SCSS linting
"redhat.vscode-yaml", // YAML extension for Visual Studio Code
"charliermarsh.ruff" // Ruff code formatter for Python to follow the Ruff Style Guide
]
}

61 .vscode/settings.json vendored
View File

@ -3,44 +3,14 @@
"editor.guides.bracketPairs": "active",
"editor.guides.bracketPairsHorizontal": "active",
"cSpell.enabled": false,
"[feature]": {
"editor.defaultFormatter": "alexkrechik.cucumberautocomplete"
},
"[java]": {
"editor.defaultFormatter": "josevseb.google-java-format-for-vs-code"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"[css]": {
"editor.defaultFormatter": "stylelint.vscode-stylelint"
},
"[json]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"[gradle-kotlin-dsl]": {
"editor.defaultFormatter": "vscjava.vscode-gradle"
},
"[markdown]": {
"editor.defaultFormatter": "yzhang.markdown-all-in-one"
},
"[gradle-build]": {
"editor.defaultFormatter": "vscjava.vscode-gradle"
},
"[gradle]": {
"editor.defaultFormatter": "vscjava.vscode-gradle"
},
"[yaml]": {
"editor.defaultFormatter": "redhat.vscode-yaml"
},
"java.compile.nullAnalysis.mode": "automatic",
"java.configuration.updateBuildConfiguration": "interactive",
"java.format.enabled": true,
"java.format.settings.profile": "GoogleStyle",
"java.format.settings.google.version": "1.27.0",
"java.format.settings.google.version": "1.26.0",
"java.format.settings.google.extra": "--aosp --skip-sorting-imports --skip-javadoc-formatting",
// (DE) Aktiviert Kommentare im Java-Format.
// (EN) Enables comments in Java formatting.
@ -79,17 +49,9 @@
".venv*/",
".vscode/",
"bin/",
"app/core/bin/",
"app/common/bin/",
"app/proprietary/bin/",
"build/",
"app/core/build/",
"app/common/build/",
"app/proprietary/build/",
"configs/",
"app/core/configs/",
"customFiles/",
"app/core/customFiles/",
"docs/",
"exampleYmlFiles",
"gradle/",
@ -101,9 +63,6 @@
".git-blame-ignore-revs",
".gitattributes",
".gitignore",
"app/core/.gitignore",
"app/common/.gitignore",
"app/proprietary/.gitignore",
".pre-commit-config.yaml",
],
// Enables signature help in Java.
@ -121,22 +80,4 @@
"spring.initializr.defaultLanguage": "Java",
"spring.initializr.defaultGroupId": "stirling.software.SPDF",
"spring.initializr.defaultArtifactId": "SPDF",
"java.jdt.ls.lombokSupport.enabled": true,
"html.format.wrapLineLength": 127,
"html.format.enable": true,
"html.format.indentInnerHtml": true,
"html.format.unformatted": "script,style,textarea",
"html.format.contentUnformatted": "pre,code",
"html.format.extraLiners": "head,body,/html",
"html.format.wrapAttributes": "force",
"html.format.wrapAttributesIndentSize": 2,
"html.format.indentHandlebars": true,
"html.format.preserveNewLines": true,
"html.format.maxPreserveNewLines": 2,
"stylelint.configFile": "devTools/.stylelintrc.json",
"java.project.sourcePaths": [
"app/core/src/main/java",
"app/common/src/main/java",
"app/proprietary/src/main/java"
]
}

View File

@ -25,7 +25,7 @@ Please make sure your Pull Request adheres to the following guidelines:
## Translations
If you would like to add or modify a translation, please see [How to add new languages to Stirling-PDF](devGuide/HowToAddNewLanguage.md). Also, please create a Pull Request so others can use it!
If you would like to add or modify a translation, please see [How to add new languages to Stirling-PDF](HowToAddNewLanguage.md). Also, please create a Pull Request so others can use it!
## Docs
@ -37,18 +37,7 @@ First, make sure you've read the section [Pull Requests](#pull-requests).
If, at any point in time, you have a question, please feel free to ask in the same issue thread or in our [Discord](https://discord.gg/FJUSXUSYec).
## Developer Documentation
For technical guides, setup instructions, and development resources, please see our [Developer Documentation](devGuide/) which includes:
- [Developer Guide](devGuide/DeveloperGuide.md) - Main setup and architecture guide
- [Exception Handling Guide](devGuide/EXCEPTION_HANDLING_GUIDE.md) - Error handling patterns and i18n
- [Translation Guide](devGuide/HowToAddNewLanguage.md) - Adding new languages
- And more in the [devGuide folder](devGuide/)
For configuration and usage guides, see:
- [Database Guide](DATABASE.md) - Database setup and configuration
- [OCR Guide](HowToUseOCR.md) - OCR setup and configuration
Developers should review our [Developer Guide](DeveloperGuide.md)
## License

View File

@ -55,7 +55,7 @@ Stirling-PDF uses Lombok to reduce boilerplate code. Some IDEs, like Eclipse, do
Visit the [Lombok website](https://projectlombok.org/setup/) for installation instructions specific to your IDE.
5. Add environment variable
For local testing, you should generally be testing the full 'Security' version of Stirling PDF. To do this, you must add the environment flag DISABLE_ADDITIONAL_FEATURES=false to your system and/or IDE build/run step.
For local testing, you should generally be testing the full 'Security' version of Stirling-PDF. To do this, you must add the environment flag DOCKER_ENABLE_SECURITY=true to your system and/or IDE build/run step.
## 4. Project Structure
@ -114,9 +114,9 @@ Stirling-PDF offers several Docker versions:
Stirling-PDF provides several example Docker Compose files in the `exampleYmlFiles` directory, such as:
- `docker-compose-latest.yml`: Latest version without login and security features
- `docker-compose-latest-security.yml`: Latest version with login and security features enabled
- `docker-compose-latest-fat-security.yml`: Fat version with login and security features enabled
- `docker-compose-latest.yml`: Latest version without security features
- `docker-compose-latest-security.yml`: Latest version with security features enabled
- `docker-compose-latest-fat-security.yml`: Fat version with security features enabled
These files provide pre-configured setups for different scenarios. For example, here's a snippet from `docker-compose-latest-security.yml`:
@ -137,11 +137,11 @@ services:
ports:
- "8080:8080"
volumes:
- ./stirling/latest/data:/usr/share/tessdata:rw
- ./stirling/latest/config:/configs:rw
- ./stirling/latest/logs:/logs:rw
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DISABLE_ADDITIONAL_FEATURES: "false"
DOCKER_ENABLE_SECURITY: "true"
SECURITY_ENABLELOGIN: "true"
PUID: 1002
PGID: 1002
@ -170,7 +170,7 @@ Stirling-PDF uses different Docker images for various configurations. The build
1. Set the security environment variable:
```bash
export DISABLE_ADDITIONAL_FEATURES=true # or false to enable login and security features for builds
export DOCKER_ENABLE_SECURITY=false # or true for security-enabled builds
```
2. Build the project with Gradle:
@ -193,10 +193,10 @@ Stirling-PDF uses different Docker images for various configurations. The build
docker build --no-cache --pull --build-arg VERSION_TAG=alpha -t stirlingtools/stirling-pdf:latest-ultra-lite -f ./Dockerfile.ultra-lite .
```
For the fat version (with login and security features enabled):
For the fat version (with security enabled):
```bash
export DISABLE_ADDITIONAL_FEATURES=false
export DOCKER_ENABLE_SECURITY=true
docker build --no-cache --pull --build-arg VERSION_TAG=alpha -t stirlingtools/stirling-pdf:latest-fat -f ./Dockerfile.fat .
```
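Once the fat image has been built, a quick local smoke test is to run it and expose port 8080 (a minimal sketch; the port mapping mirrors the compose examples earlier in this guide):

```bash
# Minimal sketch: run the freshly built fat image and expose it on port 8080.
docker run --rm -p 8080:8080 stirlingtools/stirling-pdf:latest-fat
```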
@ -272,7 +272,7 @@ Important notes:
6. Push your changes to your fork.
7. Submit a pull request to the main repository.
8. See additional [contributing guidelines](../CONTRIBUTING.md).
8. See additional [contributing guidelines](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/CONTRIBUTING.md).
When you raise a PR:
@ -332,7 +332,7 @@ Thymeleaf is a server-side Java HTML template engine. It is used in Stirling-PDF
### Thymeleaf overview
In Stirling-PDF, Thymeleaf is used to create HTML templates that are rendered on the server side. These templates are located in the `app/core/src/main/resources/templates` directory. Thymeleaf templates use a combination of HTML and special Thymeleaf attributes to dynamically generate content.
In Stirling-PDF, Thymeleaf is used to create HTML templates that are rendered on the server side. These templates are located in the `src/main/resources/templates` directory. Thymeleaf templates use a combination of HTML and special Thymeleaf attributes to dynamically generate content.
Some examples of this are:
@ -384,7 +384,7 @@ This would generate n entries of tr for each person in exampleData
### Adding a New Feature to the Backend (API)
1. **Create a New Controller:**
- Create a new Java class in the `app/core/src/main/java/stirling/software/SPDF/controller/api` directory.
- Create a new Java class in the `src/main/java/stirling/software/SPDF/controller/api` directory.
- Annotate the class with `@RestController` and `@RequestMapping` to define the API endpoint.
- Ensure to add API documentation annotations like `@Tag(name = "General", description = "General APIs")` and `@Operation(summary = "Crops a PDF document", description = "This operation takes an input PDF file and crops it according to the given coordinates. Input:PDF Output:PDF Type:SISO")`.
@ -411,7 +411,7 @@ This would generate n entries of tr for each person in exampleData
```
2. **Define the Service Layer:** (Not required but often useful)
- Create a new service class in the `app/core/src/main/java/stirling/software/SPDF/service` directory.
- Create a new service class in the `src/main/java/stirling/software/SPDF/service` directory.
- Implement the business logic for the new feature.
```java
@ -463,7 +463,7 @@ This would generate n entries of tr for each person in exampleData
### Adding a New Feature to the Frontend (UI)
1. **Create a New Thymeleaf Template:**
- Create a new HTML file in the `app/core/src/main/resources/templates` directory.
- Create a new HTML file in the `src/main/resources/templates` directory.
- Use Thymeleaf attributes to dynamically generate content.
- Use `extract-page.html` as a base example for the HTML template, which is useful to ensure importing of the general layout, navbar, and footer.
@ -507,7 +507,7 @@ This would generate n entries of tr for each person in exampleData
```
2. **Create a New Controller for the UI:**
- Create a new Java class in the `app/core/src/main/java/stirling/software/SPDF/controller/ui` directory.
- Create a new Java class in the `src/main/java/stirling/software/SPDF/controller/ui` directory.
- Annotate the class with `@Controller` and `@RequestMapping` to define the UI endpoint.
```java
@ -537,11 +537,11 @@ This would generate n entries of tr for each person in exampleData
3. **Update the Navigation Bar:**
- Add a link to the new feature page in the navigation bar.
- Update the `app/core/src/main/resources/templates/fragments/navbar.html` file.
- Update the `src/main/resources/templates/fragments/navbar.html` file.
```html
<li class="nav-item">
<a class="nav-link" th:href="@{'/new-feature'}">New Feature</a>
<a class="nav-link" th:href="@{/new-feature}">New Feature</a>
</li>
```
@ -551,7 +551,7 @@ When adding a new feature or modifying existing ones in Stirling-PDF, you'll nee
### 1. Locate Existing Language Files
Find the existing `messages.properties` files in the `app/core/src/main/resources` directory. You'll see files like:
Find the existing `messages.properties` files in the `src/main/resources` directory. You'll see files like:
- `messages.properties` (default, usually English)
- `messages_en_GB.properties`

View File

@ -1,11 +1,12 @@
# Main stage
FROM alpine:3.22.1@sha256:4bcff63911fcb4448bd4fdacec207030997caf25e9bea4045fa6c8c44de311d1
FROM alpine:3.21.3@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c
# Copy necessary files
COPY scripts /scripts
COPY pipeline /pipeline
COPY app/core/src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
COPY app/core/build/libs/*.jar app.jar
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
#COPY src/main/resources/static/fonts/*.otf /usr/share/fonts/opentype/noto/
COPY build/libs/*.jar app.jar
ARG VERSION_TAG
@ -22,7 +23,7 @@ LABEL org.opencontainers.image.version="${VERSION_TAG}"
LABEL org.opencontainers.image.keywords="PDF, manipulation, merge, split, convert, OCR, watermark"
# Set Environment Variables
ENV DISABLE_ADDITIONAL_FEATURES=true \
ENV DOCKER_ENABLE_SECURITY=false \
VERSION_TAG=$VERSION_TAG \
JAVA_BASE_OPTS="-XX:+UnlockExperimentalVMOptions -XX:MaxRAMPercentage=75 -XX:InitiatingHeapOccupancyPercent=20 -XX:+G1PeriodicGCInvokesConcurrent -XX:G1PeriodicGCInterval=10000 -XX:+UseStringDeduplication -XX:G1PeriodicGCSystemLoadThreshold=70" \
JAVA_CUSTOM_OPTS="" \
@ -33,11 +34,7 @@ ENV DISABLE_ADDITIONAL_FEATURES=true \
PYTHONPATH=/usr/lib/libreoffice/program:/opt/venv/lib/python3.12/site-packages \
UNO_PATH=/usr/lib/libreoffice/program \
URE_BOOTSTRAP=file:///usr/lib/libreoffice/program/fundamentalrc \
PATH=$PATH:/opt/venv/bin \
STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf \
TMPDIR=/tmp/stirling-pdf \
TEMP=/tmp/stirling-pdf \
TMP=/tmp/stirling-pdf
PATH=$PATH:/opt/venv/bin
# JDK for app
@ -51,6 +48,7 @@ RUN echo "@main https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/a
tini \
bash \
curl \
qpdf \
shadow \
su-exec \
openssl \
@ -68,36 +66,30 @@ RUN echo "@main https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/a
tesseract-ocr-data-deu \
tesseract-ocr-data-fra \
tesseract-ocr-data-por \
unpaper \
# CV
py3-opencv \
python3 \
ocrmypdf \
py3-pip \
py3-pillow@testing \
py3-pdf2image@testing \
# URW Base 35 fonts for better PDF rendering
font-urw-base35 && \
py3-pdf2image@testing && \
python3 -m venv /opt/venv && \
/opt/venv/bin/pip install --upgrade pip setuptools && \
/opt/venv/bin/pip install --upgrade pip && \
/opt/venv/bin/pip install --no-cache-dir --upgrade unoserver weasyprint && \
ln -s /usr/lib/libreoffice/program/uno.py /opt/venv/lib/python3.12/site-packages/ && \
ln -s /usr/lib/libreoffice/program/unohelper.py /opt/venv/lib/python3.12/site-packages/ && \
ln -s /usr/lib/libreoffice/program /opt/venv/lib/python3.12/site-packages/LibreOffice && \
mv /usr/share/tessdata /usr/share/tessdata-original && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders /tmp/stirling-pdf && \
# Configure URW Base 35 fonts
ln -s /usr/share/fontconfig/conf.avail/69-urw-*.conf /etc/fonts/conf.d/ && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
fc-cache -f -v && \
chmod +x /scripts/* && \
chmod +x /scripts/init.sh && \
# User permissions
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline /tmp/stirling-pdf && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar
EXPOSE 8080/tcp
# Set user and run command
ENTRYPOINT ["tini", "--", "/scripts/init.sh"]
CMD ["sh", "-c", "java -Dfile.encoding=UTF-8 -Djava.io.tmpdir=/tmp/stirling-pdf -jar /app.jar & /opt/venv/bin/unoserver --port 2003 --interface 127.0.0.1"]
CMD ["sh", "-c", "java -Dfile.encoding=UTF-8 -jar /app.jar & /opt/venv/bin/unoserver --port 2003 --interface 127.0.0.1"]

View File

@ -19,7 +19,7 @@ RUN apt-get update && apt-get install -y \
# settings.yml | tessdataDir: /usr/share/tesseract-ocr/5/tessdata
tesseract-ocr \
tesseract-ocr-eng \
fonts-terminus fonts-dejavu fonts-font-awesome fonts-noto fonts-noto-core fonts-noto-cjk fonts-noto-extra fonts-liberation fonts-linuxlibertine fonts-urw-base35 \
fonts-terminus fonts-dejavu fonts-font-awesome fonts-noto fonts-noto-core fonts-noto-cjk fonts-noto-extra fonts-liberation fonts-linuxlibertine \
python3-uno \
python3-venv \
# ss -tln
@ -27,16 +27,11 @@ RUN apt-get update && apt-get install -y \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
# Set the environment variable for setuptools
ENV SETUPTOOLS_USE_DISTUTILS=local \
STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf \
TMPDIR=/tmp/stirling-pdf \
TEMP=/tmp/stirling-pdf \
TMP=/tmp/stirling-pdf
ENV SETUPTOOLS_USE_DISTUTILS=local
# Install the required Python packages
RUN python3 -m venv --system-site-packages /opt/venv \
&& . /opt/venv/bin/activate \
&& pip install --upgrade pip setuptools \
&& pip install --no-cache-dir WeasyPrint pdf2image pillow unoserver opencv-python-headless pre-commit
# Add the venv path to the global PATH variable so the tools are available
@ -44,10 +39,8 @@ ENV PATH="/opt/venv/bin:$PATH"
COPY . /workspace
RUN mkdir -p /tmp/stirling-pdf \
&& fc-cache -f -v \
&& adduser --disabled-password --gecos '' devuser \
&& chown -R devuser:devuser /home/devuser /workspace /tmp/stirling-pdf
RUN adduser --disabled-password --gecos '' devuser \
&& chown -R devuser:devuser /home/devuser /workspace
RUN echo "devuser ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/devuser \
&& chmod 0440 /etc/sudoers.d/devuser

View File

@ -5,9 +5,6 @@ COPY build.gradle .
COPY settings.gradle .
COPY gradlew .
COPY gradle gradle/
COPY app/core/build.gradle core/.
COPY app/common/build.gradle common/.
COPY app/proprietary/build.gradle proprietary/.
RUN ./gradlew build -x spotlessApply -x spotlessCheck -x test -x sonarqube || return 0
# Set the working directory
@ -16,25 +13,24 @@ WORKDIR /app
# Copy the entire project to the working directory
COPY . .
# Build the application with DISABLE_ADDITIONAL_FEATURES=false
RUN DISABLE_ADDITIONAL_FEATURES=false \
# Build the application with DOCKER_ENABLE_SECURITY=false
RUN DOCKER_ENABLE_SECURITY=true \
STIRLING_PDF_DESKTOP_UI=false \
./gradlew clean build -x spotlessApply -x spotlessCheck -x test -x sonarqube
# Main stage
FROM alpine:3.22.1@sha256:4bcff63911fcb4448bd4fdacec207030997caf25e9bea4045fa6c8c44de311d1
FROM alpine:3.21.3@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c
# Copy necessary files
COPY scripts /scripts
COPY pipeline /pipeline
COPY app/core/src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
# first /app directory is for the build stage, second is for the final image
COPY --from=build /app/app/core/build/libs/*.jar app.jar
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
COPY --from=build /app/build/libs/*.jar app.jar
ARG VERSION_TAG
# Set Environment Variables
ENV DISABLE_ADDITIONAL_FEATURES=true \
ENV DOCKER_ENABLE_SECURITY=false \
VERSION_TAG=$VERSION_TAG \
JAVA_BASE_OPTS="-XX:+UnlockExperimentalVMOptions -XX:MaxRAMPercentage=75 -XX:InitiatingHeapOccupancyPercent=20 -XX:+G1PeriodicGCInvokesConcurrent -XX:G1PeriodicGCInterval=10000 -XX:+UseStringDeduplication -XX:G1PeriodicGCSystemLoadThreshold=70" \
JAVA_CUSTOM_OPTS="" \
@ -47,11 +43,7 @@ ENV DISABLE_ADDITIONAL_FEATURES=true \
PYTHONPATH=/usr/lib/libreoffice/program:/opt/venv/lib/python3.12/site-packages \
UNO_PATH=/usr/lib/libreoffice/program \
URE_BOOTSTRAP=file:///usr/lib/libreoffice/program/fundamentalrc \
PATH=$PATH:/opt/venv/bin \
STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf \
TMPDIR=/tmp/stirling-pdf \
TEMP=/tmp/stirling-pdf \
TMP=/tmp/stirling-pdf
PATH=$PATH:/opt/venv/bin
# JDK for app
@ -77,39 +69,36 @@ RUN echo "@main https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/a
# pdftohtml
poppler-utils \
# OCR MY PDF (unpaper for deskew and other advanced features)
qpdf \
tesseract-ocr-data-eng \
tesseract-ocr-data-chi_sim \
tesseract-ocr-data-deu \
tesseract-ocr-data-fra \
tesseract-ocr-data-por \
unpaper \
font-terminus font-dejavu font-noto font-noto-cjk font-awesome font-noto-extra font-liberation font-linux-libertine font-urw-base35 \
font-terminus font-dejavu font-noto font-noto-cjk font-awesome font-noto-extra font-liberation font-linux-libertine \
# CV
py3-opencv \
python3 \
ocrmypdf \
py3-pip \
py3-pillow@testing \
py3-pdf2image@testing && \
python3 -m venv /opt/venv && \
/opt/venv/bin/pip install --upgrade pip setuptools && \
/opt/venv/bin/pip install --upgrade pip && \
/opt/venv/bin/pip install --no-cache-dir --upgrade unoserver weasyprint && \
ln -s /usr/lib/libreoffice/program/uno.py /opt/venv/lib/python3.12/site-packages/ && \
ln -s /usr/lib/libreoffice/program/unohelper.py /opt/venv/lib/python3.12/site-packages/ && \
ln -s /usr/lib/libreoffice/program /opt/venv/lib/python3.12/site-packages/LibreOffice && \
mv /usr/share/tessdata /usr/share/tessdata-original && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders /tmp/stirling-pdf && \
# Configure URW Base 35 fonts
ln -s /usr/share/fontconfig/conf.avail/69-urw-*.conf /etc/fonts/conf.d/ && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
fc-cache -f -v && \
chmod +x /scripts/* && \
chmod +x /scripts/init.sh && \
# User permissions
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline /tmp/stirling-pdf && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar
EXPOSE 8080/tcp
# Set user and run command
ENTRYPOINT ["tini", "--", "/scripts/init.sh"]
CMD ["sh", "-c", "java -Dfile.encoding=UTF-8 -Djava.io.tmpdir=/tmp/stirling-pdf -jar /app.jar & /opt/venv/bin/unoserver --port 2003 --interface 127.0.0.1"]
CMD ["sh", "-c", "java -Dfile.encoding=UTF-8 -jar /app.jar & /opt/venv/bin/unoserver --port 2003 --interface 127.0.0.1"]

View File

@ -1,28 +1,24 @@
# use alpine
FROM alpine:3.22.1@sha256:4bcff63911fcb4448bd4fdacec207030997caf25e9bea4045fa6c8c44de311d1
FROM alpine:3.21.3@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c
ARG VERSION_TAG
# Set Environment Variables
ENV DISABLE_ADDITIONAL_FEATURES=true \
ENV DOCKER_ENABLE_SECURITY=false \
HOME=/home/stirlingpdfuser \
VERSION_TAG=$VERSION_TAG \
JAVA_BASE_OPTS="-XX:+UnlockExperimentalVMOptions -XX:MaxRAMPercentage=75 -XX:InitiatingHeapOccupancyPercent=20 -XX:+G1PeriodicGCInvokesConcurrent -XX:G1PeriodicGCInterval=10000 -XX:+UseStringDeduplication -XX:G1PeriodicGCSystemLoadThreshold=70" \
JAVA_CUSTOM_OPTS="" \
PUID=1000 \
PGID=1000 \
UMASK=022 \
STIRLING_TEMPFILES_DIRECTORY=/tmp/stirling-pdf \
TMPDIR=/tmp/stirling-pdf \
TEMP=/tmp/stirling-pdf \
TMP=/tmp/stirling-pdf
UMASK=022
# Copy necessary files
COPY scripts/download-security-jar.sh /scripts/download-security-jar.sh
COPY scripts/init-without-ocr.sh /scripts/init-without-ocr.sh
COPY scripts/installFonts.sh /scripts/installFonts.sh
COPY pipeline /pipeline
COPY app/core/build/libs/*.jar app.jar
COPY build/libs/*.jar app.jar
# Set up necessary directories and permissions
RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
@ -39,10 +35,10 @@ RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /et
su-exec \
openjdk21-jre && \
# User permissions
mkdir -p /configs /logs /customFiles /usr/share/fonts/opentype/noto /tmp/stirling-pdf && \
mkdir -p /configs /logs /customFiles /usr/share/fonts/opentype/noto && \
chmod +x /scripts/*.sh && \
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /configs /customFiles /pipeline /tmp/stirling-pdf && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar
# Set environment variables
@ -52,4 +48,4 @@ EXPOSE 8080/tcp
# Run the application
ENTRYPOINT ["tini", "--", "/scripts/init-without-ocr.sh"]
CMD ["java", "-Dfile.encoding=UTF-8", "-Djava.io.tmpdir=/tmp/stirling-pdf", "-jar", "/app.jar"]
CMD ["java", "-Dfile.encoding=UTF-8", "-jar", "/app.jar"]

View File

@ -10,7 +10,7 @@ Fork Stirling-PDF and create a new branch out of `main`.
Then add a reference to the language in the navbar by adding a new language entry to the dropdown:
- Edit the file: [languages.html](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/app/core/src/main/resources/templates/fragments/languages.html)
- Edit the file: [languages.html](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/templates/fragments/languages.html)
For example, to add Polish, you would add:
@ -25,7 +25,7 @@ The `data-bs-language-code` is the code used to reference the file in the next s
Start by copying the existing English property file:
- [messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/app/core/src/main/resources/messages_en_GB.properties)
- [messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/messages_en_GB.properties)
Copy and rename it to `messages_{your data-bs-language-code here}.properties`. In the Polish example, you would set the name to `messages_pl_PL.properties`.
@ -61,16 +61,8 @@ Make sure to place the entry under the correct language section. This helps main
#### Windows command
```powershell
python .github/scripts/check_language_properties.py --reference-file app\core\src\main\resources\messages_en_GB.properties --branch "" --files app\core\src\main\resources\messages_pl_PL.properties
```ps
python .github/scripts/check_language_properties.py --reference-file src\main\resources\messages_en_GB.properties --branch "" --files src\main\resources\messages_pl_PL.properties
python .github/scripts/check_language_properties.py --reference-file app\core\src\main\resources\messages_en_GB.properties --branch "" --check-file app\core\src\main\resources\messages_pl_PL.properties
```
#### Linux command
```bash
python3 .github/scripts/check_language_properties.py --reference-file app/core/src/main/resources/messages_en_GB.properties --branch "" --files app/core/src/main/resources/messages_pl_PL.properties
python3 .github/scripts/check_language_properties.py --reference-file app/core/src/main/resources/messages_en_GB.properties --branch "" --check-file app/core/src/main/resources/messages_pl_PL.properties
python .github/scripts/check_language_properties.py --reference-file src\main\resources\messages_en_GB.properties --branch "" --check-file src\main\resources\messages_pl_PL.properties
```

View File

@ -1,13 +1,6 @@
MIT License
Copyright (c) 2025 Stirling PDF Inc.
Portions of this software are licensed as follows:
* All content that resides under the "app/proprietary/" directory of this repository,
if that directory exists, is licensed under the license defined in "app/proprietary/LICENSE".
* Content outside of the above mentioned directories or restrictions above is
available under the MIT License as defined below.
Copyright (c) 2024 Stirling Tools
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@ -29,7 +29,7 @@ All documentation available at [https://docs.stirlingpdf.com/](https://docs.stir
- API for integration with external scripts
- Optional Login and Authentication support (see [here](https://docs.stirlingpdf.com/Advanced%20Configuration/System%20and%20Security) for documentation)
- Database Backup and Import (see [here](https://docs.stirlingpdf.com/Advanced%20Configuration/DATABASE) for documentation)
- Enterprise features like SSO (see [here](https://docs.stirlingpdf.com/Advanced%20Configuration/Single%20Sign-On%20Configuration) for documentation)
- Enterprise features like SSO see [here](https://docs.stirlingpdf.com/Enterprise%20Edition)
## PDF Features
@ -112,63 +112,63 @@ Visit our comprehensive documentation at [docs.stirlingpdf.com](https://docs.sti
## Supported Languages
Stirling-PDF currently supports 40 languages!
Stirling-PDF currently supports 39 languages!
| Language | Progress |
| -------------------------------------------- | -------------------------------------- |
| Arabic (العربية) (ar_AR) | ![63%](https://geps.dev/progress/63) |
| Azerbaijani (Azərbaycan Dili) (az_AZ) | ![63%](https://geps.dev/progress/63) |
| Basque (Euskara) (eu_ES) | ![37%](https://geps.dev/progress/37) |
| Bulgarian (Български) (bg_BG) | ![70%](https://geps.dev/progress/70) |
| Catalan (Català) (ca_CA) | ![69%](https://geps.dev/progress/69) |
| Croatian (Hrvatski) (hr_HR) | ![62%](https://geps.dev/progress/62) |
| Czech (Česky) (cs_CZ) | ![71%](https://geps.dev/progress/71) |
| Danish (Dansk) (da_DK) | ![63%](https://geps.dev/progress/63) |
| Dutch (Nederlands) (nl_NL) | ![61%](https://geps.dev/progress/61) |
| Arabic (العربية) (ar_AR) | ![83%](https://geps.dev/progress/83) |
| Azerbaijani (Azərbaycan Dili) (az_AZ) | ![82%](https://geps.dev/progress/82) |
| Basque (Euskara) (eu_ES) | ![48%](https://geps.dev/progress/48) |
| Bulgarian (Български) (bg_BG) | ![92%](https://geps.dev/progress/92) |
| Catalan (Català) (ca_CA) | ![89%](https://geps.dev/progress/89) |
| Croatian (Hrvatski) (hr_HR) | ![81%](https://geps.dev/progress/81) |
| Czech (Česky) (cs_CZ) | ![91%](https://geps.dev/progress/91) |
| Danish (Dansk) (da_DK) | ![80%](https://geps.dev/progress/80) |
| Dutch (Nederlands) (nl_NL) | ![79%](https://geps.dev/progress/79) |
| English (English) (en_GB) | ![100%](https://geps.dev/progress/100) |
| English (US) (en_US) | ![100%](https://geps.dev/progress/100) |
| French (Français) (fr_FR) | ![91%](https://geps.dev/progress/91) |
| German (Deutsch) (de_DE) | ![100%](https://geps.dev/progress/100) |
| Greek (Ελληνικά) (el_GR) | ![69%](https://geps.dev/progress/69) |
| Hindi (हिंदी) (hi_IN) | ![68%](https://geps.dev/progress/68) |
| French (Français) (fr_FR) | ![92%](https://geps.dev/progress/92) |
| German (Deutsch) (de_DE) | ![99%](https://geps.dev/progress/99) |
| Greek (Ελληνικά) (el_GR) | ![91%](https://geps.dev/progress/91) |
| Hindi (हिंदी) (hi_IN) | ![91%](https://geps.dev/progress/91) |
| Hungarian (Magyar) (hu_HU) | ![99%](https://geps.dev/progress/99) |
| Indonesian (Bahasa Indonesia) (id_ID) | ![63%](https://geps.dev/progress/63) |
| Irish (Gaeilge) (ga_IE) | ![70%](https://geps.dev/progress/70) |
| Italian (Italiano) (it_IT) | ![98%](https://geps.dev/progress/98) |
| Japanese (日本語) (ja_JP) | ![95%](https://geps.dev/progress/95) |
| Korean (한국어) (ko_KR) | ![69%](https://geps.dev/progress/69) |
| Norwegian (Norsk) (no_NB) | ![67%](https://geps.dev/progress/67) |
| Persian (فارسی) (fa_IR) | ![66%](https://geps.dev/progress/66) |
| Polish (Polski) (pl_PL) | ![73%](https://geps.dev/progress/73) |
| Portuguese (Português) (pt_PT) | ![70%](https://geps.dev/progress/70) |
| Portuguese Brazilian (Português) (pt_BR) | ![77%](https://geps.dev/progress/77) |
| Romanian (Română) (ro_RO) | ![59%](https://geps.dev/progress/59) |
| Russian (Русский) (ru_RU) | ![90%](https://geps.dev/progress/90) |
| Serbian Latin alphabet (Srpski) (sr_LATN_RS) | ![97%](https://geps.dev/progress/97) |
| Simplified Chinese (简体中文) (zh_CN) | ![95%](https://geps.dev/progress/95) |
| Slovakian (Slovensky) (sk_SK) | ![53%](https://geps.dev/progress/53) |
| Slovenian (Slovenščina) (sl_SI) | ![73%](https://geps.dev/progress/73) |
| Spanish (Español) (es_ES) | ![75%](https://geps.dev/progress/75) |
| Swedish (Svenska) (sv_SE) | ![67%](https://geps.dev/progress/67) |
| Thai (ไทย) (th_TH) | ![60%](https://geps.dev/progress/60) |
| Tibetan (བོད་ཡིག་) (bo_CN) | ![66%](https://geps.dev/progress/66) |
| Traditional Chinese (繁體中文) (zh_TW) | ![77%](https://geps.dev/progress/77) |
| Turkish (Türkçe) (tr_TR) | ![82%](https://geps.dev/progress/82) |
| Ukrainian (Українська) (uk_UA) | ![72%](https://geps.dev/progress/72) |
| Vietnamese (Tiếng Việt) (vi_VN) | ![58%](https://geps.dev/progress/58) |
| Malayalam (മലയാളം) (ml_IN) | ![75%](https://geps.dev/progress/75) |
| Indonesian (Bahasa Indonesia) (id_ID) | ![80%](https://geps.dev/progress/80) |
| Irish (Gaeilge) (ga_IE) | ![91%](https://geps.dev/progress/91) |
| Italian (Italiano) (it_IT) | ![99%](https://geps.dev/progress/99) |
| Japanese (日本語) (ja_JP) | ![93%](https://geps.dev/progress/93) |
| Korean (한국어) (ko_KR) | ![92%](https://geps.dev/progress/92) |
| Norwegian (Norsk) (no_NB) | ![86%](https://geps.dev/progress/86) |
| Persian (فارسی) (fa_IR) | ![87%](https://geps.dev/progress/87) |
| Polish (Polski) (pl_PL) | ![95%](https://geps.dev/progress/95) |
| Portuguese (Português) (pt_PT) | ![91%](https://geps.dev/progress/91) |
| Portuguese Brazilian (Português) (pt_BR) | ![97%](https://geps.dev/progress/97) |
| Romanian (Română) (ro_RO) | ![75%](https://geps.dev/progress/75) |
| Russian (Русский) (ru_RU) | ![93%](https://geps.dev/progress/93) |
| Serbian Latin alphabet (Srpski) (sr_LATN_RS) | ![60%](https://geps.dev/progress/60) |
| Simplified Chinese (简体中文) (zh_CN) | ![93%](https://geps.dev/progress/93) |
| Slovakian (Slovensky) (sk_SK) | ![69%](https://geps.dev/progress/69) |
| Slovenian (Slovenščina) (sl_SI) | ![94%](https://geps.dev/progress/94) |
| Spanish (Español) (es_ES) | ![98%](https://geps.dev/progress/98) |
| Swedish (Svenska) (sv_SE) | ![87%](https://geps.dev/progress/87) |
| Thai (ไทย) (th_TH) | ![80%](https://geps.dev/progress/80) |
| Tibetan (བོད་ཡིག་) (zh_BO) | ![88%](https://geps.dev/progress/88) |
| Traditional Chinese (繁體中文) (zh_TW) | ![99%](https://geps.dev/progress/99) |
| Turkish (Türkçe) (tr_TR) | ![97%](https://geps.dev/progress/97) |
| Ukrainian (Українська) (uk_UA) | ![96%](https://geps.dev/progress/96) |
| Vietnamese (Tiếng Việt) (vi_VN) | ![73%](https://geps.dev/progress/73) |
## Stirling PDF Enterprise
Stirling PDF offers an Enterprise edition of its software. This is the same great software but with added features, support, and conveniences.
Check out our [Enterprise docs](https://docs.stirlingpdf.com/Pro)
Check out our [Enterprise docs](https://docs.stirlingpdf.com/Enterprise%20Edition)
## 🤝 Looking to contribute?
Join our community:
- [Contribution Guidelines](CONTRIBUTING.md)
- [Translation Guide (How to add custom languages)](devGuide/HowToAddNewLanguage.md)
- [Developer Guide](devGuide/DeveloperGuide.md)
- [Translation Guide (How to add custom languages)](HowToAddNewLanguage.md)
- [Issue Tracker](https://github.com/Stirling-Tools/Stirling-PDF/issues)
- [Discord Community](https://discord.gg/HYmhKj45pU)
- [Developer Guide](DeveloperGuide.md)

View File

@ -124,18 +124,10 @@
"moduleName": ".*",
"moduleLicense": "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0"
},
{
"moduleName": ".*",
"moduleLicense": "Eclipse Public License 1.0"
},
{
"moduleName": ".*",
"moduleLicense": "Eclipse Public License - v 1.0"
},
{
"moduleName": ".*",
"moduleLicense": "Eclipse Public License v2.0"
},
{
"moduleName": ".*",
"moduleLicense": "Eclipse Public License v. 2.0"

app/common/.gitignore vendored
View File

@ -1,196 +0,0 @@
### Eclipse ###
.metadata
bin/
tmp/
*.tmp
*.bak
*.exe
*.swp
*~.nib
local.properties
.settings/
.loadpath
.recommenders
.classpath
.project
version.properties
#### Stirling-PDF Files ###
pipeline/watchedFolders/
pipeline/finishedFolders/
customFiles/
configs/
watchedFolders/
clientWebUI/
!cucumber/
!cucumber/exampleFiles/
!cucumber/exampleFiles/example_html.zip
exampleYmlFiles/stirling/
/testing/file_snapshots
SwaggerDoc.json
# Gradle
.gradle
.lock
# External tool builders
.externalToolBuilders/
# Locally stored "Eclipse launch configurations"
*.launch
# PyDev specific (Python IDE for Eclipse)
*.pydevproject
# CDT-specific (C/C++ Development Tooling)
.cproject
# CDT- autotools
.autotools
# Java annotation processor (APT)
.factorypath
# PDT-specific (PHP Development Tools)
.buildpath
# sbteclipse plugin
.target
# Tern plugin
.tern-project
# TeXlipse plugin
.texlipse
# STS (Spring Tool Suite)
.springBeans
# Code Recommenders
.recommenders/
# Annotation Processing
.apt_generated/
.apt_generated_test/
# Scala IDE specific (Scala & Java development for Eclipse)
.cache-main
.scala_dependencies
.worksheet
# Uncomment this line if you wish to ignore the project description file.
# Typically, this file would be tracked if it contains build/dependency configurations:
#.project
### Eclipse Patch ###
# Spring Boot Tooling
.sts4-cache/
### Git ###
# Created by git for backups. To disable backups in Git:
# $ git config --global mergetool.keepBackup false
*.orig
# Created by git when using merge tools for conflicts
*.BACKUP.*
*.BASE.*
*.LOCAL.*
*.REMOTE.*
*_BACKUP_*.txt
*_BASE_*.txt
*_LOCAL_*.txt
*_REMOTE_*.txt
### Java ###
# Compiled class file
*.class
# Log file
*.log
# BlueJ files
*.ctxt
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
*.db
/build
/app/common/build/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*.pyo
# Virtual environments
.env*
.venv*
env*/
venv*/
ENV/
env.bak/
venv.bak/
# VS Code
/.vscode/**/*
!/.vscode/settings.json
!/.vscode/extensions.json
# IntelliJ IDEA
.idea/
*.iml
out/
# Ignore Mac DS_Store files
.DS_Store
**/.DS_Store
# cucumber
/cucumber/reports/**
# Certs and Security Files
*.p12
*.pk8
*.pem
*.crt
*.cer
*.cert
*.der
*.key
*.csr
*.kdbx
*.jks
*.asc
# SSH Keys
*.pub
*.priv
id_rsa
id_rsa.pub
id_ecdsa
id_ecdsa.pub
id_ed25519
id_ed25519.pub
.ssh/
*ssh
# cache
.cache
.ruff_cache
.mypy_cache
.pytest_cache
.ipynb_checkpoints
**/jcef-bundle/
# node_modules
node_modules/

View File

@ -1,33 +0,0 @@
// Configure bootRun to disable it or point to a main class
bootRun {
enabled = false
}
spotless {
java {
target sourceSets.main.allJava
googleJavaFormat(googleJavaFormatVersion).aosp().reorderImports(false)
importOrder("java", "javax", "org", "com", "net", "io", "jakarta", "lombok", "me", "stirling")
toggleOffOn()
trimTrailingWhitespace()
leadingTabsToSpaces()
endWithNewline()
}
}
dependencies {
api 'org.springframework.boot:spring-boot-starter-web'
api 'org.springframework.boot:spring-boot-starter-aop'
api 'org.springframework.boot:spring-boot-starter-thymeleaf'
api 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:20240325.1'
api 'com.fathzer:javaluator:3.0.6'
api 'com.posthog.java:posthog:1.2.0'
api 'org.apache.commons:commons-lang3:3.18.0'
api 'com.drewnoakes:metadata-extractor:2.19.0' // Image metadata extractor
api 'com.vladsch.flexmark:flexmark-html2md-converter:0.64.8'
api "org.apache.pdfbox:pdfbox:$pdfboxVersion"
api 'jakarta.servlet:jakarta.servlet-api:6.1.0'
api 'org.snakeyaml:snakeyaml-engine:2.10'
api "org.springdoc:springdoc-openapi-starter-webmvc-ui:2.8.9"
api 'jakarta.mail:jakarta.mail-api:2.1.3'
runtimeOnly 'org.eclipse.angus:angus-mail:2.0.3'
}

View File

@ -1,78 +0,0 @@
package stirling.software.common.annotations;
import java.lang.annotation.*;
import org.springframework.core.annotation.AliasFor;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
/**
* Shortcut for a POST endpoint that is executed through the Stirling "autojob" framework.
*
* <p>Behaviour notes:
*
* <ul>
* <li>The endpoint is registered with {@code POST} and, by default, consumes {@code
* multipart/form-data} unless you override {@link #consumes()}.
* <li>When the client supplies {@code ?async=true} the call is handed to {@link
* stirling.software.common.service.JobExecutorService JobExecutorService} where it may be
* queued, retried, tracked and subject to timeouts. For synchronous (default) invocations
* these advanced options are ignored.
* <li>Progress information (see {@link #trackProgress()}) is stored in {@link
* stirling.software.common.service.TaskManager TaskManager} and can be polled via <code>
* GET /api/v1/general/job/{id}</code>.
* </ul>
*
* <p>Unless stated otherwise an attribute only affects <em>async</em> execution.
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@RequestMapping(method = RequestMethod.POST)
public @interface AutoJobPostMapping {
/** Alias for {@link RequestMapping#value} the path mapping of the endpoint. */
@AliasFor(annotation = RequestMapping.class, attribute = "value")
String[] value() default {};
/** MIME types this endpoint accepts. Defaults to {@code multipart/form-data}. */
@AliasFor(annotation = RequestMapping.class, attribute = "consumes")
String[] consumes() default {"multipart/form-data"};
/**
* Maximum execution time in milliseconds before the job is aborted. A negative value means "use
* the application default".
*
* <p>Only honoured when {@code async=true}.
*/
long timeout() default -1;
/**
Total number of attempts (initial + retries). Must be at least 1. Retries are executed
* with exponential backoff.
*
* <p>Only honoured when {@code async=true}.
*/
int retryCount() default 1;
/**
* Record percentage / note updates so they can be retrieved via the REST status endpoint.
*
* <p>Only honoured when {@code async=true}.
*/
boolean trackProgress() default true;
/**
* If {@code true} the job may be placed in a queue instead of being rejected when resources are
* scarce.
*
* <p>Only honoured when {@code async=true}.
*/
boolean queueable() default false;
/**
Relative resource weight (1-100) used by the scheduler to prioritise / throttle jobs. Values
below 1 are clamped to 1, values above 100 to 100.
*/
int resourceWeight() default 50;
}
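
For illustration, a minimal controller sketch showing how an endpoint might opt into the autojob framework. The controller class, endpoint path, and request handling below are hypothetical; only the annotation attributes and the `PDFFile` parameter type come from the definitions in this changeset.

```java
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.model.api.PDFFile;

// Hypothetical usage sketch of @AutoJobPostMapping; the controller and path
// are illustrative only - the attributes mirror the annotation above.
@RestController
@RequestMapping("/api/v1/general")
public class ExampleJobController {

    @AutoJobPostMapping(
            value = "/example-operation", // POST /api/v1/general/example-operation
            timeout = 60_000,             // only honoured when the client sends ?async=true
            retryCount = 3,               // initial attempt + 2 retries
            queueable = true,             // may be queued under resource pressure
            resourceWeight = 30)          // scheduler hint, clamped to 1-100
    public ResponseEntity<byte[]> exampleOperation(@ModelAttribute PDFFile request) {
        // Synchronous callers get the bytes back directly; async callers receive a
        // JobResponse with a jobId and poll GET /api/v1/general/job/{id} for progress.
        byte[] processed = process(request);
        return ResponseEntity.ok(processed);
    }

    private byte[] process(PDFFile request) {
        // placeholder for the actual PDF work
        return new byte[0];
    }
}
```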

View File

@ -1,289 +0,0 @@
package stirling.software.common.aop;
import java.io.IOException;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.*;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
import jakarta.servlet.http.HttpServletRequest;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.annotations.AutoJobPostMapping;
import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.FileOrUploadService;
import stirling.software.common.service.FileStorage;
import stirling.software.common.service.JobExecutorService;
@Aspect
@Component
@RequiredArgsConstructor
@Slf4j
@Order(0) // Highest precedence - executes before audit aspects
public class AutoJobAspect {
private static final Duration RETRY_BASE_DELAY = Duration.ofMillis(100);
private final JobExecutorService jobExecutorService;
private final HttpServletRequest request;
private final FileOrUploadService fileOrUploadService;
private final FileStorage fileStorage;
@Around("@annotation(autoJobPostMapping)")
public Object wrapWithJobExecution(
ProceedingJoinPoint joinPoint, AutoJobPostMapping autoJobPostMapping) {
// This aspect will run before any audit aspects due to @Order(0)
// Extract parameters from the request and annotation
boolean async = Boolean.parseBoolean(request.getParameter("async"));
log.debug(
"AutoJobAspect: Processing {} {} with async={}",
request.getMethod(),
request.getRequestURI(),
async);
long timeout = autoJobPostMapping.timeout();
int retryCount = autoJobPostMapping.retryCount();
boolean trackProgress = autoJobPostMapping.trackProgress();
log.debug(
"AutoJobPostMapping execution with async={}, timeout={}, retryCount={}, trackProgress={}",
async,
timeout > 0 ? timeout : "default",
retryCount,
trackProgress);
// Process arguments in-place to avoid type mismatch issues
Object[] args = processArgsInPlace(joinPoint.getArgs(), async);
// Extract queueable and resourceWeight parameters and validate
boolean queueable = autoJobPostMapping.queueable();
int resourceWeight = Math.max(1, Math.min(100, autoJobPostMapping.resourceWeight()));
// Integrate with the JobExecutorService
if (retryCount <= 1) {
// No retries needed, simple execution
return jobExecutorService.runJobGeneric(
async,
() -> {
try {
// Note: Progress tracking is handled in TaskManager/JobExecutorService
// The trackProgress flag controls whether detailed progress is stored
// for REST API queries, not WebSocket notifications
return joinPoint.proceed(args);
} catch (Throwable ex) {
log.error(
"AutoJobAspect caught exception during job execution: {}",
ex.getMessage(),
ex);
throw new RuntimeException(ex);
}
},
timeout,
queueable,
resourceWeight);
} else {
// Use retry logic
return executeWithRetries(
joinPoint,
args,
async,
timeout,
retryCount,
trackProgress,
queueable,
resourceWeight);
}
}
private Object executeWithRetries(
ProceedingJoinPoint joinPoint,
Object[] args,
boolean async,
long timeout,
int maxRetries,
boolean trackProgress,
boolean queueable,
int resourceWeight) {
// Keep jobId reference for progress tracking in TaskManager
AtomicReference<String> jobIdRef = new AtomicReference<>();
return jobExecutorService.runJobGeneric(
async,
() -> {
// Use iterative approach instead of recursion to avoid stack overflow
Throwable lastException = null;
// Attempt counter starts at 1 for first try
for (int currentAttempt = 1; currentAttempt <= maxRetries; currentAttempt++) {
try {
if (trackProgress && async) {
// Get jobId for progress tracking in TaskManager
// This enables REST API progress queries, not WebSocket
if (jobIdRef.get() == null) {
jobIdRef.set(getJobIdFromContext());
}
String jobId = jobIdRef.get();
if (jobId != null) {
log.debug(
"Tracking progress for job {} (attempt {}/{})",
jobId,
currentAttempt,
maxRetries);
// Progress is tracked in TaskManager for REST API access
// No WebSocket notifications sent here
}
}
// Attempt to execute the operation
return joinPoint.proceed(args);
} catch (Throwable ex) {
lastException = ex;
log.error(
"AutoJobAspect caught exception during job execution (attempt {}/{}): {}",
currentAttempt,
maxRetries,
ex.getMessage(),
ex);
// Check if we should retry
if (currentAttempt < maxRetries) {
log.info(
"Retrying operation, attempt {}/{}",
currentAttempt + 1,
maxRetries);
if (trackProgress && async) {
String jobId = jobIdRef.get();
if (jobId != null) {
log.debug(
"Recording retry attempt for job {} in TaskManager",
jobId);
// Retry info is tracked in TaskManager for REST API access
}
}
// Use non-blocking delay for all retry attempts to avoid blocking
// threads
// For sync jobs this avoids starving the tomcat thread pool under
// load
long delayMs = RETRY_BASE_DELAY.toMillis() * currentAttempt;
// Execute the retry after a delay through the JobExecutorService
// rather than blocking the current thread with sleep
CompletableFuture<Object> delayedRetry = new CompletableFuture<>();
// Use a delayed executor for non-blocking delay
CompletableFuture.delayedExecutor(delayMs, TimeUnit.MILLISECONDS)
.execute(
() -> {
// Continue the retry loop in the next iteration
// We can't return from here directly since
// we're in a Runnable
delayedRetry.complete(null);
});
// Wait for the delay to complete before continuing
try {
delayedRetry.join();
} catch (Exception e) {
Thread.currentThread().interrupt();
break;
}
} else {
// No more retries, we'll throw the exception after the loop
break;
}
}
}
// If we get here, all retries failed
if (lastException != null) {
throw new RuntimeException(
"Job failed after "
+ maxRetries
+ " attempts: "
+ lastException.getMessage(),
lastException);
}
// This should never happen if lastException is properly tracked
throw new RuntimeException("Job failed but no exception was recorded");
},
timeout,
queueable,
resourceWeight);
}
/**
* Processes arguments in-place to handle file resolution and async file persistence. This
* approach avoids type mismatch issues by modifying the original objects directly.
*
* @param originalArgs The original arguments
* @param async Whether this is an async operation
* @return The original array with processed arguments
*/
private Object[] processArgsInPlace(Object[] originalArgs, boolean async) {
if (originalArgs == null || originalArgs.length == 0) {
return originalArgs;
}
// Process all arguments in-place
for (int i = 0; i < originalArgs.length; i++) {
Object arg = originalArgs[i];
if (arg instanceof PDFFile pdfFile) {
// Case 1: fileId is provided but no fileInput
if (pdfFile.getFileInput() == null && pdfFile.getFileId() != null) {
try {
log.debug("Using fileId {} to get file content", pdfFile.getFileId());
MultipartFile file = fileStorage.retrieveFile(pdfFile.getFileId());
pdfFile.setFileInput(file);
} catch (Exception e) {
throw new RuntimeException(
"Failed to resolve file by ID: " + pdfFile.getFileId(), e);
}
}
// Case 2: For async requests, we need to make a copy of the MultipartFile
else if (async && pdfFile.getFileInput() != null) {
try {
log.debug("Making persistent copy of uploaded file for async processing");
MultipartFile originalFile = pdfFile.getFileInput();
String fileId = fileStorage.storeFile(originalFile);
// Store the fileId for later reference
pdfFile.setFileId(fileId);
// Replace the original MultipartFile with our persistent copy
MultipartFile persistentFile = fileStorage.retrieveFile(fileId);
pdfFile.setFileInput(persistentFile);
log.debug("Created persistent file copy with fileId: {}", fileId);
} catch (IOException e) {
throw new RuntimeException(
"Failed to create persistent copy of uploaded file", e);
}
}
}
}
return originalArgs;
}
private String getJobIdFromContext() {
try {
return (String) request.getAttribute("jobId");
} catch (Exception e) {
log.debug("Could not retrieve job ID from context: {}", e.getMessage());
return null;
}
}
}
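
The retry loop above avoids `Thread.sleep` by scheduling the pause on `CompletableFuture.delayedExecutor` and joining on the resulting future. A minimal standalone sketch of that pattern, outside any Spring context (the attempt count and base delay are illustrative):

```java
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

// Sketch of the delay pattern used in AutoJobAspect: the pause between
// attempts is scheduled on a delayed executor and the caller joins on the
// future, mirroring the aspect above.
public class RetryDelayExample {

    private static final Duration RETRY_BASE_DELAY = Duration.ofMillis(100);

    public static void main(String[] args) {
        int maxRetries = 3;
        for (int attempt = 1; attempt <= maxRetries; attempt++) {
            System.out.println("attempt " + attempt);
            if (attempt < maxRetries) {
                long delayMs = RETRY_BASE_DELAY.toMillis() * attempt; // 100 ms, then 200 ms
                CompletableFuture<Void> delay = new CompletableFuture<>();
                CompletableFuture.delayedExecutor(delayMs, TimeUnit.MILLISECONDS)
                        .execute(() -> delay.complete(null));
                delay.join(); // wait for the scheduled delay to elapse before retrying
            }
        }
    }
}
```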

View File

@ -1,59 +0,0 @@
package stirling.software.common.config;
import java.nio.file.Files;
import java.nio.file.Path;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.util.TempFileRegistry;
/**
* Configuration for the temporary file management system. Sets up the necessary beans and
* configures system properties.
*/
@Slf4j
@Configuration
@RequiredArgsConstructor
public class TempFileConfiguration {
private final ApplicationProperties applicationProperties;
/**
* Create the TempFileRegistry bean.
*
* @return A new TempFileRegistry instance
*/
@Bean
public TempFileRegistry tempFileRegistry() {
return new TempFileRegistry();
}
@PostConstruct
public void initTempFileConfig() {
try {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
String customTempDirectory = tempFiles.getBaseTmpDir();
// Create the temp directory if it doesn't exist
Path tempDir = Path.of(customTempDirectory);
if (!Files.exists(tempDir)) {
Files.createDirectories(tempDir);
log.info("Created temporary directory: {}", tempDir);
}
log.debug("Temporary file configuration initialized");
log.debug("Using temp directory: {}", customTempDirectory);
log.debug("Temp file prefix: {}", tempFiles.getPrefix());
} catch (Exception e) {
log.error("Failed to initialize temporary file configuration", e);
}
}
}
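
As a rough sketch, a component consuming the beans configured above might look like this. The class itself is hypothetical, but every method it calls (`getSystem().getTempFileManagement()`, `getBaseTmpDir()`, `getPrefix()`, `getAllRegisteredFiles()`) appears elsewhere in this changeset.

```java
import org.springframework.stereotype.Component;

import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.util.TempFileRegistry;

// Illustrative consumer of TempFileRegistry and the temp-file settings.
@Slf4j
@Component
@RequiredArgsConstructor
public class TempFileStartupReport {

    private final ApplicationProperties applicationProperties;
    private final TempFileRegistry registry;

    @PostConstruct
    public void report() {
        ApplicationProperties.TempFileManagement tempFiles =
                applicationProperties.getSystem().getTempFileManagement();
        log.info(
                "Temp files: dir={}, prefix={}, currently registered={}",
                tempFiles.getBaseTmpDir(),
                tempFiles.getPrefix(),
                registry.getAllRegisteredFiles().size());
    }
}
```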

View File

@ -1,82 +0,0 @@
package stirling.software.common.config;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.stereotype.Component;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.TempFileRegistry;
/**
* Handles cleanup of temporary files on application shutdown. Implements Spring's DisposableBean
* interface to ensure cleanup happens during normal application shutdown.
*/
@Slf4j
@Component
public class TempFileShutdownHook implements DisposableBean {
private final TempFileRegistry registry;
public TempFileShutdownHook(TempFileRegistry registry) {
this.registry = registry;
// Register a JVM shutdown hook as a backup in case Spring's
// DisposableBean mechanism doesn't trigger (e.g., during a crash)
Runtime.getRuntime().addShutdownHook(new Thread(this::cleanupTempFiles));
}
/** Spring's DisposableBean interface method. Called during normal application shutdown. */
@Override
public void destroy() {
log.info("Application shutting down, cleaning up temporary files");
cleanupTempFiles();
}
/** Clean up all registered temporary files and directories. */
private void cleanupTempFiles() {
try {
// Clean up all registered files
Set<Path> files = registry.getAllRegisteredFiles();
int deletedCount = 0;
for (Path file : files) {
try {
if (Files.exists(file)) {
Files.deleteIfExists(file);
deletedCount++;
}
} catch (IOException e) {
log.warn("Failed to delete temp file during shutdown: {}", file, e);
}
}
// Clean up all registered directories
Set<Path> directories = registry.getTempDirectories();
for (Path dir : directories) {
try {
if (Files.exists(dir)) {
GeneralUtils.deleteDirectory(dir);
deletedCount++;
}
} catch (IOException e) {
log.warn("Failed to delete temp directory during shutdown: {}", dir, e);
}
}
log.info(
"Shutdown cleanup complete. Deleted {} temporary files/directories",
deletedCount);
// Clear the registry
registry.clear();
} catch (Exception e) {
log.error("Error during shutdown cleanup", e);
}
}
}

View File

@ -1,25 +0,0 @@
package stirling.software.common.model.api;
import org.springframework.web.multipart.MultipartFile;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@EqualsAndHashCode
public class PDFFile {
@Schema(
description = "The input PDF file",
contentMediaType = "application/pdf",
format = "binary")
private MultipartFile fileInput;
@Schema(
description = "File ID for server-side files (can be used instead of fileInput)",
example = "a1b2c3d4-5678-90ab-cdef-ghijklmnopqr")
private String fileId;
}

View File

@ -1,41 +0,0 @@
package stirling.software.common.model.api.converters;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import stirling.software.common.model.api.PDFFile;
@Data
@EqualsAndHashCode(callSuper = true)
public class EmlToPdfRequest extends PDFFile {
// fileInput is inherited from PDFFile
@Schema(
description = "Include email attachments in the PDF output",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "false")
private boolean includeAttachments = false;
@Schema(
description = "Maximum attachment size in MB to include (default 10MB, range: 1-100)",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "10",
minimum = "1",
maximum = "100")
private int maxAttachmentSizeMB = 10;
@Schema(
description = "Download HTML intermediate file instead of PDF",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "false")
private boolean downloadHtml = false;
@Schema(
description = "Include CC and BCC recipients in header (if available)",
requiredMode = Schema.RequiredMode.NOT_REQUIRED,
example = "true")
private boolean includeAllRecipients = true;
}

View File

@ -1,7 +0,0 @@
package stirling.software.common.model.exception;
public class UnsupportedClaimException extends RuntimeException {
public UnsupportedClaimException(String message) {
super(message);
}
}

View File

@ -1,15 +0,0 @@
package stirling.software.common.model.job;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class JobProgress {
private String jobId;
private String status;
private int percentComplete;
private String message;
}

View File

@ -1,14 +0,0 @@
package stirling.software.common.model.job;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class JobResponse<T> {
private boolean async;
private String jobId;
private T result;
}

View File

@ -1,164 +0,0 @@
package stirling.software.common.model.job;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** Represents the result of a job execution. Used by the TaskManager to store job results. */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class JobResult {
/** The job ID */
private String jobId;
/** Flag indicating if the job is complete */
private boolean complete;
/** Error message if the job failed */
private String error;
/** List of result files for jobs that produce files */
@JsonIgnore private List<ResultFile> resultFiles;
/** Time when the job was created */
private LocalDateTime createdAt;
/** Time when the job was completed */
private LocalDateTime completedAt;
/** The actual result object, if not a file */
private Object result;
/**
* Notes attached to this job for tracking purposes. Uses CopyOnWriteArrayList for thread safety
* when notes are added concurrently.
*/
private final List<String> notes = new CopyOnWriteArrayList<>();
/**
* Create a new JobResult with the given job ID
*
* @param jobId The job ID
* @return A new JobResult
*/
public static JobResult createNew(String jobId) {
return JobResult.builder()
.jobId(jobId)
.complete(false)
.createdAt(LocalDateTime.now())
.build();
}
/**
* Mark this job as complete with a general result
*
* @param result The result object
*/
public void completeWithResult(Object result) {
this.complete = true;
this.result = result;
this.completedAt = LocalDateTime.now();
}
/**
* Mark this job as failed with an error message
*
* @param error The error message
*/
public void failWithError(String error) {
this.complete = true;
this.error = error;
this.completedAt = LocalDateTime.now();
}
/**
* Mark this job as complete with multiple file results
*
* @param resultFiles The list of result files
*/
public void completeWithFiles(List<ResultFile> resultFiles) {
this.complete = true;
this.resultFiles = new ArrayList<>(resultFiles);
this.completedAt = LocalDateTime.now();
}
/**
* Mark this job as complete with a single file result (convenience method)
*
* @param fileId The file ID of the result
* @param fileName The file name
* @param contentType The content type of the file
* @param fileSize The size of the file in bytes
*/
public void completeWithSingleFile(
String fileId, String fileName, String contentType, long fileSize) {
ResultFile resultFile =
ResultFile.builder()
.fileId(fileId)
.fileName(fileName)
.contentType(contentType)
.fileSize(fileSize)
.build();
completeWithFiles(List.of(resultFile));
}
/**
* Check if this job has file results
*
* @return true if this job has file results, false otherwise
*/
public boolean hasFiles() {
return resultFiles != null && !resultFiles.isEmpty();
}
/**
* Check if this job has multiple file results
*
* @return true if this job has multiple file results, false otherwise
*/
public boolean hasMultipleFiles() {
return resultFiles != null && resultFiles.size() > 1;
}
/**
* Get all result files
*
* @return List of result files
*/
public List<ResultFile> getAllResultFiles() {
if (resultFiles != null && !resultFiles.isEmpty()) {
return Collections.unmodifiableList(resultFiles);
}
return Collections.emptyList();
}
/**
* Add a note to this job
*
* @param note The note to add
*/
public void addNote(String note) {
this.notes.add(note);
}
/**
* Get all notes attached to this job
*
* @return An unmodifiable view of the notes list
*/
public List<String> getNotes() {
return Collections.unmodifiableList(notes);
}
}
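
A short sketch of the JobResult lifecycle using only the methods defined above; the IDs, file name, and sizes are made-up example values.

```java
import java.util.List;

import stirling.software.common.model.job.JobResult;
import stirling.software.common.model.job.ResultFile;

// Example lifecycle: create, annotate, complete with a single file, inspect.
public class JobResultExample {

    public static void main(String[] args) {
        JobResult result = JobResult.createNew("123e4567-e89b-12d3-a456-426614174000");
        result.addNote("queued behind 2 jobs");

        // On success with a single produced file:
        result.completeWithSingleFile(
                "a1b2c3d4-file-id", "output.pdf", "application/pdf", 48_213L);

        if (result.hasFiles() && !result.hasMultipleFiles()) {
            ResultFile file = result.getAllResultFiles().get(0);
            System.out.println(file.getFileName() + " (" + file.getFileSize() + " bytes)");
        }

        List<String> notes = result.getNotes();
        System.out.println("notes: " + notes);
    }
}
```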

View File

@ -1,43 +0,0 @@
package stirling.software.common.model.job;
import java.time.LocalDateTime;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** Represents statistics about jobs in the system */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class JobStats {
/** Total number of jobs (active and completed) */
private int totalJobs;
/** Number of active (incomplete) jobs */
private int activeJobs;
/** Number of completed jobs */
private int completedJobs;
/** Number of failed jobs */
private int failedJobs;
/** Number of successful jobs */
private int successfulJobs;
/** Number of jobs with file results */
private int fileResultJobs;
/** The oldest active job's creation timestamp */
private LocalDateTime oldestActiveJobTime;
/** The newest active job's creation timestamp */
private LocalDateTime newestActiveJobTime;
/** The average processing time for completed jobs in milliseconds */
private long averageProcessingTimeMs;
}

View File

@ -1,26 +0,0 @@
package stirling.software.common.model.job;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** Represents a single file result from a job execution */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ResultFile {
/** The file ID for accessing the file */
private String fileId;
/** The original file name */
private String fileName;
/** MIME type of the file */
private String contentType;
/** Size of the file in bytes */
private long fileSize;
}

View File

@ -1,78 +0,0 @@
package stirling.software.common.service;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.*;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
public class FileOrUploadService {
@Value("${stirling.tempDir:/tmp/stirling-files}")
private String tempDirPath;
public Path resolveFilePath(String fileId) {
return Path.of(tempDirPath).resolve(fileId);
}
public MultipartFile toMockMultipartFile(String name, byte[] data) throws IOException {
return new CustomMultipartFile(name, data);
}
// Custom implementation of MultipartFile
private static class CustomMultipartFile implements MultipartFile {
private final String name;
private final byte[] content;
public CustomMultipartFile(String name, byte[] content) {
this.name = name;
this.content = content;
}
@Override
public String getName() {
return name;
}
@Override
public String getOriginalFilename() {
return name;
}
@Override
public String getContentType() {
return "application/pdf";
}
@Override
public boolean isEmpty() {
return content == null || content.length == 0;
}
@Override
public long getSize() {
return content.length;
}
@Override
public byte[] getBytes() throws IOException {
return content;
}
@Override
public java.io.InputStream getInputStream() throws IOException {
return new ByteArrayInputStream(content);
}
@Override
public void transferTo(java.io.File dest) throws IOException, IllegalStateException {
Files.write(dest.toPath(), content);
}
}
}
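
A hypothetical caller of FileOrUploadService, shown only to illustrate the two public methods above; the byte payload and file ID are illustrative.

```java
import java.io.IOException;
import java.nio.file.Path;

import org.springframework.web.multipart.MultipartFile;

import stirling.software.common.service.FileOrUploadService;

// Wraps raw bytes as a MultipartFile and resolves a stored file's path.
public class FileOrUploadExample {

    public static void demo(FileOrUploadService service) throws IOException {
        // Wrap raw bytes (e.g. a generated PDF) for APIs that expect a MultipartFile.
        byte[] pdfBytes = new byte[] {'%', 'P', 'D', 'F'};
        MultipartFile wrapped = service.toMockMultipartFile("result.pdf", pdfBytes);
        System.out.println(wrapped.getOriginalFilename() + ", " + wrapped.getSize() + " bytes");

        // Resolve where a file with a given ID would live on disk.
        Path path = service.resolveFilePath("123e4567-e89b-12d3-a456-426614174000");
        System.out.println("stored at: " + path);
    }
}
```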

View File

@ -1,184 +0,0 @@
package stirling.software.common.service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
/**
* Service for storing and retrieving files with unique file IDs. Used by the AutoJobPostMapping
* system to handle file references.
*/
@Service
@RequiredArgsConstructor
@Slf4j
public class FileStorage {
@Value("${stirling.tempDir:/tmp/stirling-files}")
private String tempDirPath;
private final FileOrUploadService fileOrUploadService;
/**
* Store a file and return its unique ID
*
* @param file The file to store
* @return The unique ID assigned to the file
* @throws IOException If there is an error storing the file
*/
public String storeFile(MultipartFile file) throws IOException {
String fileId = generateFileId();
Path filePath = getFilePath(fileId);
// Ensure the directory exists
Files.createDirectories(filePath.getParent());
// Transfer the file to the storage location
file.transferTo(filePath.toFile());
log.debug("Stored file with ID: {}", fileId);
return fileId;
}
/**
* Store a byte array as a file and return its unique ID
*
* @param bytes The byte array to store
* @param originalName The original name of the file (for extension)
* @return The unique ID assigned to the file
* @throws IOException If there is an error storing the file
*/
public String storeBytes(byte[] bytes, String originalName) throws IOException {
String fileId = generateFileId();
Path filePath = getFilePath(fileId);
// Ensure the directory exists
Files.createDirectories(filePath.getParent());
// Write the bytes to the file
Files.write(filePath, bytes);
log.debug("Stored byte array with ID: {}", fileId);
return fileId;
}
/**
* Retrieve a file by its ID as a MultipartFile
*
* @param fileId The ID of the file to retrieve
* @return The file as a MultipartFile
* @throws IOException If the file doesn't exist or can't be read
*/
public MultipartFile retrieveFile(String fileId) throws IOException {
Path filePath = getFilePath(fileId);
if (!Files.exists(filePath)) {
throw new IOException("File not found with ID: " + fileId);
}
byte[] fileData = Files.readAllBytes(filePath);
return fileOrUploadService.toMockMultipartFile(fileId, fileData);
}
/**
* Retrieve a file by its ID as a byte array
*
* @param fileId The ID of the file to retrieve
* @return The file as a byte array
* @throws IOException If the file doesn't exist or can't be read
*/
public byte[] retrieveBytes(String fileId) throws IOException {
Path filePath = getFilePath(fileId);
if (!Files.exists(filePath)) {
throw new IOException("File not found with ID: " + fileId);
}
return Files.readAllBytes(filePath);
}
/**
* Delete a file by its ID
*
* @param fileId The ID of the file to delete
* @return true if the file was deleted, false otherwise
*/
public boolean deleteFile(String fileId) {
try {
Path filePath = getFilePath(fileId);
return Files.deleteIfExists(filePath);
} catch (IOException e) {
log.error("Error deleting file with ID: {}", fileId, e);
return false;
}
}
/**
* Check if a file exists by its ID
*
* @param fileId The ID of the file to check
* @return true if the file exists, false otherwise
*/
public boolean fileExists(String fileId) {
Path filePath = getFilePath(fileId);
return Files.exists(filePath);
}
/**
* Get the size of a file by its ID without loading the content into memory
*
* @param fileId The ID of the file
* @return The size of the file in bytes
* @throws IOException If the file doesn't exist or can't be read
*/
public long getFileSize(String fileId) throws IOException {
Path filePath = getFilePath(fileId);
if (!Files.exists(filePath)) {
throw new IOException("File not found with ID: " + fileId);
}
return Files.size(filePath);
}
/**
* Get the path for a file ID
*
* @param fileId The ID of the file
* @return The path to the file
* @throws IllegalArgumentException if fileId contains path traversal characters or resolves
* outside base directory
*/
private Path getFilePath(String fileId) {
// Validate fileId to prevent path traversal
if (fileId.contains("..") || fileId.contains("/") || fileId.contains("\\")) {
throw new IllegalArgumentException("Invalid file ID");
}
Path basePath = Path.of(tempDirPath).normalize().toAbsolutePath();
Path resolvedPath = basePath.resolve(fileId).normalize();
// Ensure resolved path is within the base directory
if (!resolvedPath.startsWith(basePath)) {
throw new IllegalArgumentException("File ID resolves to an invalid path");
}
return resolvedPath;
}
/**
* Generate a unique file ID
*
* @return A unique file ID
*/
private String generateFileId() {
return UUID.randomUUID().toString();
}
}
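
A sketch of the store/retrieve/delete round-trip through FileStorage, as used by the AutoJobPostMapping flow; the upload is assumed to arrive as a controller parameter, and only methods defined above are called.

```java
import java.io.IOException;

import org.springframework.web.multipart.MultipartFile;

import stirling.software.common.service.FileStorage;

// Persist an upload, fetch it back by ID, then clean up.
public class FileStorageExample {

    public static void roundTrip(FileStorage fileStorage, MultipartFile upload)
            throws IOException {
        // Persist the upload and keep only the generated ID.
        String fileId = fileStorage.storeFile(upload);

        // Later (possibly in another request), check for it and fetch it back.
        if (fileStorage.fileExists(fileId)) {
            long size = fileStorage.getFileSize(fileId);
            byte[] bytes = fileStorage.retrieveBytes(fileId);
            System.out.println("retrieved " + bytes.length + " of " + size + " bytes");
        }

        // Clean up once the job result has been delivered.
        boolean deleted = fileStorage.deleteFile(fileId);
        System.out.println("deleted: " + deleted);
    }
}
```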

View File

@ -1,476 +0,0 @@
package stirling.software.common.service;
import java.io.IOException;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import jakarta.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.job.JobResponse;
import stirling.software.common.util.ExecutorFactory;
/** Service for executing jobs asynchronously or synchronously */
@Service
@Slf4j
public class JobExecutorService {
private final TaskManager taskManager;
private final FileStorage fileStorage;
private final HttpServletRequest request;
private final ResourceMonitor resourceMonitor;
private final JobQueue jobQueue;
private final ExecutorService executor = ExecutorFactory.newVirtualOrCachedThreadExecutor();
private final long effectiveTimeoutMs;
public JobExecutorService(
TaskManager taskManager,
FileStorage fileStorage,
HttpServletRequest request,
ResourceMonitor resourceMonitor,
JobQueue jobQueue,
@Value("${spring.mvc.async.request-timeout:1200000}") long asyncRequestTimeoutMs,
@Value("${server.servlet.session.timeout:30m}") String sessionTimeout) {
this.taskManager = taskManager;
this.fileStorage = fileStorage;
this.request = request;
this.resourceMonitor = resourceMonitor;
this.jobQueue = jobQueue;
// Parse session timeout and calculate effective timeout once during initialization
long sessionTimeoutMs = parseSessionTimeout(sessionTimeout);
this.effectiveTimeoutMs = Math.min(asyncRequestTimeoutMs, sessionTimeoutMs);
log.debug(
"Job executor configured with effective timeout of {} ms", this.effectiveTimeoutMs);
}
/**
* Run a job either asynchronously or synchronously
*
* @param async Whether to run the job asynchronously
* @param work The work to be done
* @return The response
*/
public ResponseEntity<?> runJobGeneric(boolean async, Supplier<Object> work) {
return runJobGeneric(async, work, -1);
}
/**
* Run a job either asynchronously or synchronously with a custom timeout
*
* @param async Whether to run the job asynchronously
* @param work The work to be done
* @param customTimeoutMs Custom timeout in milliseconds, or -1 to use the default
* @return The response
*/
public ResponseEntity<?> runJobGeneric(
boolean async, Supplier<Object> work, long customTimeoutMs) {
return runJobGeneric(async, work, customTimeoutMs, false, 50);
}
/**
* Run a job either asynchronously or synchronously with custom parameters
*
* @param async Whether to run the job asynchronously
* @param work The work to be done
* @param customTimeoutMs Custom timeout in milliseconds, or -1 to use the default
* @param queueable Whether this job can be queued when system resources are limited
* @param resourceWeight The resource weight of this job (1-100)
* @return The response
*/
public ResponseEntity<?> runJobGeneric(
boolean async,
Supplier<Object> work,
long customTimeoutMs,
boolean queueable,
int resourceWeight) {
String jobId = UUID.randomUUID().toString();
// Store the job ID in the request for potential use by other components
if (request != null) {
request.setAttribute("jobId", jobId);
// Also track this job ID in the user's session for authorization purposes
// This ensures users can only cancel their own jobs
if (request.getSession() != null) {
@SuppressWarnings("unchecked")
java.util.Set<String> userJobIds =
(java.util.Set<String>) request.getSession().getAttribute("userJobIds");
if (userJobIds == null) {
userJobIds = new java.util.concurrent.ConcurrentSkipListSet<>();
request.getSession().setAttribute("userJobIds", userJobIds);
}
userJobIds.add(jobId);
log.debug("Added job ID {} to user session", jobId);
}
}
// Determine which timeout to use
long timeoutToUse = customTimeoutMs > 0 ? customTimeoutMs : effectiveTimeoutMs;
log.debug(
"Running job with ID: {}, async: {}, timeout: {}ms, queueable: {}, weight: {}",
jobId,
async,
timeoutToUse,
queueable,
resourceWeight);
// Check if we need to queue this job based on resource availability
boolean shouldQueue =
queueable
&& async
&& // Only async jobs can be queued
resourceMonitor.shouldQueueJob(resourceWeight);
if (shouldQueue) {
// Queue the job instead of executing immediately
log.debug(
"Queueing job {} due to resource constraints (weight: {})",
jobId,
resourceWeight);
taskManager.createTask(jobId);
// Create a specialized wrapper that updates the TaskManager
Supplier<Object> wrappedWork =
() -> {
try {
Object result = work.get();
processJobResult(jobId, result);
return result;
} catch (Exception e) {
log.error(
"Error executing queued job {}: {}", jobId, e.getMessage(), e);
taskManager.setError(jobId, e.getMessage());
throw e;
}
};
// Queue the job and get the future
CompletableFuture<ResponseEntity<?>> future =
jobQueue.queueJob(jobId, resourceWeight, wrappedWork, timeoutToUse);
// Return immediately with job ID
return ResponseEntity.ok().body(new JobResponse<>(true, jobId, null));
} else if (async) {
taskManager.createTask(jobId);
executor.execute(
() -> {
try {
log.debug(
"Running async job {} with timeout {} ms", jobId, timeoutToUse);
// Execute with timeout
Object result = executeWithTimeout(() -> work.get(), timeoutToUse);
processJobResult(jobId, result);
} catch (TimeoutException te) {
log.error("Job {} timed out after {} ms", jobId, timeoutToUse);
taskManager.setError(jobId, "Job timed out");
} catch (Exception e) {
log.error("Error executing job {}: {}", jobId, e.getMessage(), e);
taskManager.setError(jobId, e.getMessage());
}
});
return ResponseEntity.ok().body(new JobResponse<>(true, jobId, null));
} else {
try {
log.debug("Running sync job with timeout {} ms", timeoutToUse);
// Execute with timeout
Object result = executeWithTimeout(() -> work.get(), timeoutToUse);
// If the result is already a ResponseEntity, return it directly
if (result instanceof ResponseEntity) {
return (ResponseEntity<?>) result;
}
// Process different result types
return handleResultForSyncJob(result);
} catch (TimeoutException te) {
log.error("Synchronous job timed out after {} ms", timeoutToUse);
return ResponseEntity.internalServerError()
.body(Map.of("error", "Job timed out after " + timeoutToUse + " ms"));
} catch (Exception e) {
log.error("Error executing synchronous job: {}", e.getMessage(), e);
// Construct a JSON error response
return ResponseEntity.internalServerError()
.body(Map.of("error", "Job failed: " + e.getMessage()));
}
}
}
/**
* Process the result of an asynchronous job
*
* @param jobId The job ID
* @param result The result
*/
private void processJobResult(String jobId, Object result) {
try {
if (result instanceof byte[]) {
// Store byte array directly to disk to avoid double memory consumption
String fileId = fileStorage.storeBytes((byte[]) result, "result.pdf");
taskManager.setFileResult(jobId, fileId, "result.pdf", "application/pdf");
log.debug("Stored byte[] result with fileId: {}", fileId);
// Let the byte array get collected naturally in the next GC cycle
// We don't need to force System.gc() which can be harmful
} else if (result instanceof ResponseEntity) {
ResponseEntity<?> response = (ResponseEntity<?>) result;
Object body = response.getBody();
if (body instanceof byte[]) {
// Extract filename from content-disposition header if available
String filename = "result.pdf";
String contentType = "application/pdf";
if (response.getHeaders().getContentDisposition() != null) {
String disposition =
response.getHeaders().getContentDisposition().toString();
if (disposition.contains("filename=")) {
filename =
disposition.substring(
disposition.indexOf("filename=") + 9,
disposition.lastIndexOf("\""));
}
}
if (response.getHeaders().getContentType() != null) {
contentType = response.getHeaders().getContentType().toString();
}
// Store byte array directly to disk
String fileId = fileStorage.storeBytes((byte[]) body, filename);
taskManager.setFileResult(jobId, fileId, filename, contentType);
log.debug("Stored ResponseEntity<byte[]> result with fileId: {}", fileId);
// Let the GC handle the memory naturally
} else {
// Check if the response body contains a fileId
if (body != null && body.toString().contains("fileId")) {
try {
// Try to extract fileId using reflection
java.lang.reflect.Method getFileId =
body.getClass().getMethod("getFileId");
String fileId = (String) getFileId.invoke(body);
if (fileId != null && !fileId.isEmpty()) {
// Try to get filename and content type
String filename = "result.pdf";
String contentType = "application/pdf";
try {
java.lang.reflect.Method getOriginalFileName =
body.getClass().getMethod("getOriginalFilename");
String origName = (String) getOriginalFileName.invoke(body);
if (origName != null && !origName.isEmpty()) {
filename = origName;
}
} catch (Exception e) {
log.debug(
"Could not get original filename: {}", e.getMessage());
}
try {
java.lang.reflect.Method getContentType =
body.getClass().getMethod("getContentType");
String ct = (String) getContentType.invoke(body);
if (ct != null && !ct.isEmpty()) {
contentType = ct;
}
} catch (Exception e) {
log.debug("Could not get content type: {}", e.getMessage());
}
taskManager.setFileResult(jobId, fileId, filename, contentType);
log.debug("Extracted fileId from response body: {}", fileId);
taskManager.setComplete(jobId);
return;
}
} catch (Exception e) {
log.debug(
"Failed to extract fileId from response body: {}",
e.getMessage());
}
}
// Store generic result
taskManager.setResult(jobId, body);
}
} else if (result instanceof MultipartFile) {
MultipartFile file = (MultipartFile) result;
String fileId = fileStorage.storeFile(file);
taskManager.setFileResult(
jobId, fileId, file.getOriginalFilename(), file.getContentType());
log.debug("Stored MultipartFile result with fileId: {}", fileId);
} else {
// Check if result has a fileId field
if (result != null) {
try {
// Try to extract fileId using reflection
java.lang.reflect.Method getFileId =
result.getClass().getMethod("getFileId");
String fileId = (String) getFileId.invoke(result);
if (fileId != null && !fileId.isEmpty()) {
// Try to get filename and content type
String filename = "result.pdf";
String contentType = "application/pdf";
try {
java.lang.reflect.Method getOriginalFileName =
result.getClass().getMethod("getOriginalFilename");
String origName = (String) getOriginalFileName.invoke(result);
if (origName != null && !origName.isEmpty()) {
filename = origName;
}
} catch (Exception e) {
log.debug("Could not get original filename: {}", e.getMessage());
}
try {
java.lang.reflect.Method getContentType =
result.getClass().getMethod("getContentType");
String ct = (String) getContentType.invoke(result);
if (ct != null && !ct.isEmpty()) {
contentType = ct;
}
} catch (Exception e) {
log.debug("Could not get content type: {}", e.getMessage());
}
taskManager.setFileResult(jobId, fileId, filename, contentType);
log.debug("Extracted fileId from result object: {}", fileId);
taskManager.setComplete(jobId);
return;
}
} catch (Exception e) {
log.debug(
"Failed to extract fileId from result object: {}", e.getMessage());
}
}
// Default case: store the result as is
taskManager.setResult(jobId, result);
}
taskManager.setComplete(jobId);
} catch (Exception e) {
log.error("Error processing job result: {}", e.getMessage(), e);
taskManager.setError(jobId, "Error processing result: " + e.getMessage());
}
}
/**
* Handle different result types for synchronous jobs
*
* @param result The result object
* @return The appropriate ResponseEntity
* @throws IOException If there is an error processing the result
*/
private ResponseEntity<?> handleResultForSyncJob(Object result) throws IOException {
if (result instanceof byte[]) {
// Return byte array as PDF
return ResponseEntity.ok()
.contentType(MediaType.APPLICATION_PDF)
.header(
HttpHeaders.CONTENT_DISPOSITION,
"form-data; name=\"attachment\"; filename=\"result.pdf\"")
.body(result);
} else if (result instanceof MultipartFile) {
// Return MultipartFile content
MultipartFile file = (MultipartFile) result;
return ResponseEntity.ok()
.contentType(MediaType.parseMediaType(file.getContentType()))
.header(
HttpHeaders.CONTENT_DISPOSITION,
"form-data; name=\"attachment\"; filename=\""
+ file.getOriginalFilename()
+ "\"")
.body(file.getBytes());
} else {
// Default case: return as JSON
return ResponseEntity.ok(result);
}
}
/**
* Parse session timeout string (e.g., "30m", "1h") to milliseconds
*
* @param timeout The timeout string
* @return The timeout in milliseconds
*/
private long parseSessionTimeout(String timeout) {
if (timeout == null || timeout.isEmpty()) {
return 30 * 60 * 1000; // Default: 30 minutes
}
try {
String value = timeout.replaceAll("[^\\d.]", "");
String unit = timeout.replaceAll("[\\d.]", "");
double numericValue = Double.parseDouble(value);
return switch (unit.toLowerCase()) {
case "s" -> (long) (numericValue * 1000);
case "m" -> (long) (numericValue * 60 * 1000);
case "h" -> (long) (numericValue * 60 * 60 * 1000);
case "d" -> (long) (numericValue * 24 * 60 * 60 * 1000);
default -> (long) (numericValue * 60 * 1000); // Default to minutes
};
} catch (Exception e) {
log.warn("Could not parse session timeout '{}', using default", timeout);
return 30 * 60 * 1000; // Default: 30 minutes
}
}
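    // Examples of what parseSessionTimeout resolves to:
    //   "45s"  -> 45_000 ms
    //   "30m"  -> 1_800_000 ms
    //   "1.5h" -> 5_400_000 ms
    //   "2d"   -> 172_800_000 ms
    //   unparsable input such as "abc" falls back to the 30-minute default (1_800_000 ms)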
/**
* Execute a supplier with a timeout
*
* @param supplier The supplier to execute
* @param timeoutMs The timeout in milliseconds
* @return The result from the supplier
* @throws TimeoutException If the execution times out
* @throws Exception If the supplier throws an exception
*/
private <T> T executeWithTimeout(Supplier<T> supplier, long timeoutMs)
throws TimeoutException, Exception {
// Use the same executor as other async jobs for consistency
// This ensures all operations run on the same thread pool
java.util.concurrent.CompletableFuture<T> future =
java.util.concurrent.CompletableFuture.supplyAsync(supplier, executor);
try {
return future.get(timeoutMs, TimeUnit.MILLISECONDS);
} catch (java.util.concurrent.TimeoutException e) {
future.cancel(true);
throw new TimeoutException("Execution timed out after " + timeoutMs + " ms");
} catch (java.util.concurrent.ExecutionException e) {
throw (Exception) e.getCause();
} catch (java.util.concurrent.CancellationException e) {
throw new Exception("Execution was cancelled", e);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new Exception("Execution was interrupted", e);
}
}
}
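The runner above exposes the five-argument runJobGeneric as its most general entry point. A minimal caller might look like the sketch below; the enclosing class declaration sits outside this hunk, so the JobExecutorService type name, the ExampleJobController class, and the /api/example/heavy-task endpoint are illustrative placeholders rather than code from this repository.

import java.util.function.Supplier;

import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class ExampleJobController {

    private final JobExecutorService jobExecutor; // placeholder name for the runner service above

    public ExampleJobController(JobExecutorService jobExecutor) {
        this.jobExecutor = jobExecutor;
    }

    @PostMapping("/api/example/heavy-task")
    public ResponseEntity<?> runHeavyTask(@RequestParam(defaultValue = "true") boolean async) {
        // The supplier is the unit of work; returning byte[] makes the runner store the bytes
        // as a file result (async) or stream them back as a PDF (sync).
        Supplier<Object> work = () -> new byte[] {0x25, 0x50, 0x44, 0x46}; // "%PDF" stub bytes
        // queueable=true with weight 80 marks this as a heavy job that may be queued when
        // ResourceMonitor reports WARNING or CRITICAL; -1 keeps the default timeout.
        return jobExecutor.runJobGeneric(async, work, -1, true, 80);
    }
}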

View File

@ -1,495 +0,0 @@
package stirling.software.common.service;
import java.time.Instant;
import java.util.Map;
import java.util.concurrent.*;
import java.util.function.Supplier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.SmartLifecycle;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.util.ExecutorFactory;
import stirling.software.common.util.SpringContextHolder;
/**
* Manages a queue of jobs with dynamic sizing based on system resources. Used when system resources
* are limited to prevent overloading.
*/
@Service
@Slf4j
public class JobQueue implements SmartLifecycle {
private volatile boolean running = false;
private final ResourceMonitor resourceMonitor;
@Value("${stirling.job.queue.base-capacity:10}")
private int baseQueueCapacity = 10;
@Value("${stirling.job.queue.min-capacity:2}")
private int minQueueCapacity = 2;
@Value("${stirling.job.queue.check-interval-ms:1000}")
private long queueCheckIntervalMs = 1000;
@Value("${stirling.job.queue.max-wait-time-ms:600000}")
private long maxWaitTimeMs = 600000; // 10 minutes
private volatile BlockingQueue<QueuedJob> jobQueue;
private final Map<String, QueuedJob> jobMap = new ConcurrentHashMap<>();
private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
private final ExecutorService jobExecutor = ExecutorFactory.newVirtualOrCachedThreadExecutor();
private final Object queueLock = new Object(); // Lock for synchronizing queue operations
private boolean shuttingDown = false;
@Getter private int rejectedJobs = 0;
@Getter private int totalQueuedJobs = 0;
@Getter private int currentQueueSize = 0;
/** Represents a job waiting in the queue. */
@Data
@AllArgsConstructor
private static class QueuedJob {
private final String jobId;
private final int resourceWeight;
private final Supplier<Object> work;
private final long timeoutMs;
private final Instant queuedAt;
private CompletableFuture<ResponseEntity<?>> future;
private volatile boolean cancelled = false;
}
public JobQueue(ResourceMonitor resourceMonitor) {
this.resourceMonitor = resourceMonitor;
// Initialize with dynamic capacity
int capacity =
resourceMonitor.calculateDynamicQueueCapacity(baseQueueCapacity, minQueueCapacity);
this.jobQueue = new LinkedBlockingQueue<>(capacity);
}
// Remove @PostConstruct to let SmartLifecycle control startup
private void initializeSchedulers() {
log.debug(
"Starting job queue with base capacity {}, min capacity {}",
baseQueueCapacity,
minQueueCapacity);
// Periodically process the job queue
scheduler.scheduleWithFixedDelay(
this::processQueue, 0, queueCheckIntervalMs, TimeUnit.MILLISECONDS);
// Periodically update queue capacity based on resource usage
scheduler.scheduleWithFixedDelay(
this::updateQueueCapacity,
10000, // Initial delay
30000, // 30 second interval
TimeUnit.MILLISECONDS);
}
// Remove @PreDestroy to let SmartLifecycle control shutdown
private void shutdownSchedulers() {
log.info("Shutting down job queue");
shuttingDown = true;
// Complete any futures that are still waiting
jobMap.forEach(
(id, job) -> {
if (!job.future.isDone()) {
job.future.completeExceptionally(
new RuntimeException("Server shutting down, job cancelled"));
}
});
// Shutdown schedulers and wait for termination
try {
scheduler.shutdown();
if (!scheduler.awaitTermination(5, TimeUnit.SECONDS)) {
scheduler.shutdownNow();
}
jobExecutor.shutdown();
if (!jobExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
jobExecutor.shutdownNow();
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
scheduler.shutdownNow();
jobExecutor.shutdownNow();
}
log.info(
"Job queue shutdown complete. Stats: total={}, rejected={}",
totalQueuedJobs,
rejectedJobs);
}
// SmartLifecycle methods
@Override
public void start() {
log.info("Starting JobQueue lifecycle");
if (!running) {
initializeSchedulers();
running = true;
}
}
@Override
public void stop() {
log.info("Stopping JobQueue lifecycle");
shutdownSchedulers();
running = false;
}
@Override
public boolean isRunning() {
return running;
}
@Override
public int getPhase() {
// Start earlier than most components, but shutdown later
return 10;
}
@Override
public boolean isAutoStartup() {
return true;
}
/**
* Queues a job for execution when resources permit.
*
* @param jobId The job ID
* @param resourceWeight The resource weight of the job (1-100)
* @param work The work to be done
* @param timeoutMs The timeout in milliseconds
* @return A CompletableFuture that will complete when the job is executed
*/
public CompletableFuture<ResponseEntity<?>> queueJob(
String jobId, int resourceWeight, Supplier<Object> work, long timeoutMs) {
// Create a CompletableFuture to track this job's completion
CompletableFuture<ResponseEntity<?>> future = new CompletableFuture<>();
// Create the queued job
QueuedJob job =
new QueuedJob(jobId, resourceWeight, work, timeoutMs, Instant.now(), future, false);
// Store in our map for lookup
jobMap.put(jobId, job);
// Update stats
totalQueuedJobs++;
// Synchronize access to the queue
synchronized (queueLock) {
currentQueueSize = jobQueue.size();
// Try to add to the queue
try {
boolean added = jobQueue.offer(job, 5, TimeUnit.SECONDS);
if (!added) {
log.warn("Queue full, rejecting job {}", jobId);
rejectedJobs++;
future.completeExceptionally(
new RuntimeException("Job queue full, please try again later"));
jobMap.remove(jobId);
return future;
}
log.debug(
"Job {} queued for execution (weight: {}, queue size: {})",
jobId,
resourceWeight,
jobQueue.size());
return future;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
future.completeExceptionally(new RuntimeException("Job queue interrupted"));
jobMap.remove(jobId);
return future;
}
}
}
/**
* Gets the current capacity of the job queue.
*
* @return The current capacity
*/
public int getQueueCapacity() {
synchronized (queueLock) {
return ((LinkedBlockingQueue<QueuedJob>) jobQueue).remainingCapacity()
+ jobQueue.size();
}
}
/** Updates the capacity of the job queue based on available system resources. */
private void updateQueueCapacity() {
try {
// Calculate new capacity once and cache the result
int newCapacity =
resourceMonitor.calculateDynamicQueueCapacity(
baseQueueCapacity, minQueueCapacity);
int currentCapacity = getQueueCapacity();
if (newCapacity != currentCapacity) {
log.debug(
"Updating job queue capacity from {} to {}", currentCapacity, newCapacity);
synchronized (queueLock) {
// Double-check that capacity still needs to be updated
// Use the cached currentCapacity to avoid calling getQueueCapacity() again
if (newCapacity != currentCapacity) {
// Create new queue with updated capacity
BlockingQueue<QueuedJob> newQueue = new LinkedBlockingQueue<>(newCapacity);
// Transfer jobs from old queue to new queue
jobQueue.drainTo(newQueue);
jobQueue = newQueue;
currentQueueSize = jobQueue.size();
}
}
}
} catch (Exception e) {
log.error("Error updating queue capacity: {}", e.getMessage(), e);
}
}
/** Processes jobs in the queue, executing them when resources permit. */
private void processQueue() {
// Jobs to execute after releasing the lock
java.util.List<QueuedJob> jobsToExecute = new java.util.ArrayList<>();
// First synchronized block: poll jobs from the queue and prepare them for execution
synchronized (queueLock) {
if (shuttingDown || jobQueue.isEmpty()) {
return;
}
try {
// Get current resource status
ResourceMonitor.ResourceStatus status = resourceMonitor.getCurrentStatus().get();
// Check if we should execute any jobs
boolean canExecuteJobs = (status != ResourceMonitor.ResourceStatus.CRITICAL);
if (!canExecuteJobs) {
// Under critical load, don't execute any jobs
log.debug("System under critical load, delaying job execution");
return;
}
// Get jobs from the queue, up to a limit based on resource availability
int jobsToProcess =
Math.max(
1,
switch (status) {
case OK -> 3;
case WARNING -> 1;
case CRITICAL -> 0;
});
for (int i = 0; i < jobsToProcess && !jobQueue.isEmpty(); i++) {
QueuedJob job = jobQueue.poll();
if (job == null) break;
// Check if it's been waiting too long
long waitTimeMs = Instant.now().toEpochMilli() - job.queuedAt.toEpochMilli();
if (waitTimeMs > maxWaitTimeMs) {
log.warn(
"Job {} exceeded maximum wait time ({} ms), executing anyway",
job.jobId,
waitTimeMs);
// Add a specific status to the job context that can be tracked
// This will be visible in the job status API
try {
TaskManager taskManager =
SpringContextHolder.getBean(TaskManager.class);
if (taskManager != null) {
taskManager.addNote(
job.jobId,
"QUEUED_TIMEOUT: Job waited in queue for "
+ (waitTimeMs / 1000)
+ " seconds, exceeding the maximum wait time of "
+ (maxWaitTimeMs / 1000)
+ " seconds.");
}
} catch (Exception e) {
log.error(
"Failed to add timeout note to job {}: {}",
job.jobId,
e.getMessage());
}
}
// Remove from our map
jobMap.remove(job.jobId);
currentQueueSize = jobQueue.size();
// Add to the list of jobs to execute outside the synchronized block
jobsToExecute.add(job);
}
} catch (Exception e) {
log.error("Error processing job queue: {}", e.getMessage(), e);
}
}
// Now execute the jobs outside the synchronized block to avoid holding the lock
for (QueuedJob job : jobsToExecute) {
executeJob(job);
}
}
/**
* Executes a job from the queue.
*
* @param job The job to execute
*/
private void executeJob(QueuedJob job) {
if (job.cancelled) {
log.debug("Job {} was cancelled, not executing", job.jobId);
return;
}
jobExecutor.execute(
() -> {
log.debug("Executing queued job {} (queued at {})", job.jobId, job.queuedAt);
try {
// Execute with timeout
Object result = executeWithTimeout(job.work, job.timeoutMs);
// Process the result
if (result instanceof ResponseEntity) {
job.future.complete((ResponseEntity<?>) result);
} else {
job.future.complete(ResponseEntity.ok(result));
}
} catch (Exception e) {
log.error(
"Error executing queued job {}: {}", job.jobId, e.getMessage(), e);
job.future.completeExceptionally(e);
}
});
}
/**
* Execute a supplier with a timeout.
*
* @param supplier The supplier to execute
* @param timeoutMs The timeout in milliseconds
* @return The result from the supplier
* @throws Exception If there is an execution error
*/
private <T> T executeWithTimeout(Supplier<T> supplier, long timeoutMs) throws Exception {
CompletableFuture<T> future = CompletableFuture.supplyAsync(supplier);
try {
if (timeoutMs <= 0) {
// No timeout
return future.join();
} else {
// With timeout
return future.get(timeoutMs, TimeUnit.MILLISECONDS);
}
} catch (TimeoutException e) {
future.cancel(true);
throw new TimeoutException("Job timed out after " + timeoutMs + "ms");
} catch (ExecutionException e) {
throw (Exception) e.getCause();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new InterruptedException("Job was interrupted");
}
}
/**
* Checks if a job is queued.
*
* @param jobId The job ID
* @return true if the job is queued
*/
public boolean isJobQueued(String jobId) {
return jobMap.containsKey(jobId);
}
/**
* Gets the current position of a job in the queue.
*
* @param jobId The job ID
* @return The position (0-based) or -1 if not found
*/
public int getJobPosition(String jobId) {
if (!jobMap.containsKey(jobId)) {
return -1;
}
// Count positions
int position = 0;
for (QueuedJob job : jobQueue) {
if (job.jobId.equals(jobId)) {
return position;
}
position++;
}
// If we didn't find it in the queue but it's in the map,
// it might be executing already
return -1;
}
/**
* Cancels a queued job.
*
* @param jobId The job ID
* @return true if the job was cancelled, false if not found
*/
public boolean cancelJob(String jobId) {
QueuedJob job = jobMap.remove(jobId);
if (job != null) {
job.cancelled = true;
job.future.completeExceptionally(new RuntimeException("Job cancelled by user"));
// Try to remove from queue if it's still there
jobQueue.remove(job);
currentQueueSize = jobQueue.size();
log.debug("Job {} cancelled", jobId);
return true;
}
return false;
}
/**
* Get queue statistics.
*
* @return A map containing queue statistics
*/
public Map<String, Object> getQueueStats() {
return Map.of(
"queuedJobs", jobQueue.size(),
"queueCapacity", getQueueCapacity(),
"totalQueuedJobs", totalQueuedJobs,
"rejectedJobs", rejectedJobs,
"resourceStatus", resourceMonitor.getCurrentStatus().get().name());
}
}
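Queueing is normally driven by the runner above, but queueJob can also be exercised directly. A minimal sketch, assuming standard Spring constructor injection; the QueueUsageExample class and the chosen weight and timeout values are illustrative, not part of this codebase.

import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;

import stirling.software.common.service.JobQueue;

@Component
class QueueUsageExample {

    private final JobQueue jobQueue;

    QueueUsageExample(JobQueue jobQueue) {
        this.jobQueue = jobQueue;
    }

    CompletableFuture<ResponseEntity<?>> submit(String jobId, Supplier<Object> work) {
        // Weight 70 marks a heavy job and 300_000 ms gives a five-minute timeout. The future
        // completes when processQueue() eventually executes the work, or exceptionally if the
        // queue is full, the job is cancelled, or the server shuts down.
        return jobQueue.queueJob(jobId, 70, work, 300_000L);
    }
}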

View File

@ -1,279 +0,0 @@
package stirling.software.common.service;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.OperatingSystemMXBean;
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
/**
* Monitors system resources (CPU, memory) to inform job scheduling decisions. Provides information
* about available resources to prevent overloading the system.
*/
@Service
@Slf4j
public class ResourceMonitor {
@Value("${stirling.resource.memory.critical-threshold:0.9}")
private double memoryCriticalThreshold = 0.9; // 90% usage is critical
@Value("${stirling.resource.memory.high-threshold:0.75}")
private double memoryHighThreshold = 0.75; // 75% usage is high
@Value("${stirling.resource.cpu.critical-threshold:0.9}")
private double cpuCriticalThreshold = 0.9; // 90% usage is critical
@Value("${stirling.resource.cpu.high-threshold:0.75}")
private double cpuHighThreshold = 0.75; // 75% usage is high
@Value("${stirling.resource.monitor.interval-ms:60000}")
private long monitorIntervalMs = 60000; // 60 seconds
private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
private final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
private final OperatingSystemMXBean osMXBean = ManagementFactory.getOperatingSystemMXBean();
@Getter
private final AtomicReference<ResourceStatus> currentStatus =
new AtomicReference<>(ResourceStatus.OK);
@Getter
private final AtomicReference<ResourceMetrics> latestMetrics =
new AtomicReference<>(new ResourceMetrics());
/** Represents the current status of system resources. */
public enum ResourceStatus {
/** Resources are available, normal operations can proceed */
OK,
/** Resources are under strain, consider queueing high-resource operations */
WARNING,
/** Resources are critically low, queue all operations */
CRITICAL
}
/** Detailed metrics about system resources. */
@Getter
public static class ResourceMetrics {
private final double cpuUsage;
private final double memoryUsage;
private final long freeMemoryBytes;
private final long totalMemoryBytes;
private final long maxMemoryBytes;
private final Instant timestamp;
public ResourceMetrics() {
this(0, 0, 0, 0, 0, Instant.now());
}
public ResourceMetrics(
double cpuUsage,
double memoryUsage,
long freeMemoryBytes,
long totalMemoryBytes,
long maxMemoryBytes,
Instant timestamp) {
this.cpuUsage = cpuUsage;
this.memoryUsage = memoryUsage;
this.freeMemoryBytes = freeMemoryBytes;
this.totalMemoryBytes = totalMemoryBytes;
this.maxMemoryBytes = maxMemoryBytes;
this.timestamp = timestamp;
}
/**
* Gets the age of these metrics.
*
* @return Duration since these metrics were collected
*/
public Duration getAge() {
return Duration.between(timestamp, Instant.now());
}
/**
* Check if these metrics are stale (older than threshold).
*
* @param thresholdMs Staleness threshold in milliseconds
* @return true if metrics are stale
*/
public boolean isStale(long thresholdMs) {
return getAge().toMillis() > thresholdMs;
}
}
@PostConstruct
public void initialize() {
log.debug("Starting resource monitoring with interval of {}ms", monitorIntervalMs);
scheduler.scheduleAtFixedRate(
this::updateResourceMetrics, 0, monitorIntervalMs, TimeUnit.MILLISECONDS);
}
@PreDestroy
public void shutdown() {
log.info("Shutting down resource monitoring");
scheduler.shutdownNow();
}
/** Updates the resource metrics by sampling current system state. */
private void updateResourceMetrics() {
try {
// Get CPU usage
double cpuUsage = osMXBean.getSystemLoadAverage() / osMXBean.getAvailableProcessors();
if (cpuUsage < 0) cpuUsage = getAlternativeCpuLoad(); // Fallback if not available
// Get memory usage
long heapUsed = memoryMXBean.getHeapMemoryUsage().getUsed();
long nonHeapUsed = memoryMXBean.getNonHeapMemoryUsage().getUsed();
long totalUsed = heapUsed + nonHeapUsed;
long maxMemory = Runtime.getRuntime().maxMemory();
long totalMemory = Runtime.getRuntime().totalMemory();
long freeMemory = Runtime.getRuntime().freeMemory();
double memoryUsage = (double) totalUsed / maxMemory;
// Create new metrics
ResourceMetrics metrics =
new ResourceMetrics(
cpuUsage,
memoryUsage,
freeMemory,
totalMemory,
maxMemory,
Instant.now());
latestMetrics.set(metrics);
// Determine system status
ResourceStatus newStatus;
if (cpuUsage > cpuCriticalThreshold || memoryUsage > memoryCriticalThreshold) {
newStatus = ResourceStatus.CRITICAL;
} else if (cpuUsage > cpuHighThreshold || memoryUsage > memoryHighThreshold) {
newStatus = ResourceStatus.WARNING;
} else {
newStatus = ResourceStatus.OK;
}
// Update status if it changed
ResourceStatus oldStatus = currentStatus.getAndSet(newStatus);
if (oldStatus != newStatus) {
log.info("System resource status changed from {} to {}", oldStatus, newStatus);
log.info(
"Current metrics - CPU: {}%, Memory: {}%, Free Memory: {} MB",
String.format("%.1f", cpuUsage * 100),
String.format("%.1f", memoryUsage * 100),
freeMemory / (1024 * 1024));
}
} catch (Exception e) {
log.error("Error updating resource metrics: {}", e.getMessage(), e);
}
}
/**
* Alternative method to estimate CPU load if getSystemLoadAverage() is not available. This is a
* fallback and less accurate than the official JMX method.
*
* @return Estimated CPU load as a value between 0.0 and 1.0
*/
private double getAlternativeCpuLoad() {
try {
// Try to get CPU time if available through reflection
// This is a fallback since we can't directly cast to platform-specific classes
try {
java.lang.reflect.Method m =
osMXBean.getClass().getDeclaredMethod("getProcessCpuLoad");
m.setAccessible(true);
return (double) m.invoke(osMXBean);
} catch (Exception e) {
// Try the older method
try {
java.lang.reflect.Method m =
osMXBean.getClass().getDeclaredMethod("getSystemCpuLoad");
m.setAccessible(true);
return (double) m.invoke(osMXBean);
} catch (Exception e2) {
log.trace(
"Could not get CPU load through reflection, assuming moderate load (0.5)");
return 0.5;
}
}
} catch (Exception e) {
log.trace("Could not get CPU load, assuming moderate load (0.5)");
return 0.5; // Default to moderate load
}
}
/**
* Calculates the dynamic job queue capacity based on current resource usage.
*
* @param baseCapacity The base capacity when system is under minimal load
* @param minCapacity The minimum capacity to maintain even under high load
* @return The calculated job queue capacity
*/
public int calculateDynamicQueueCapacity(int baseCapacity, int minCapacity) {
ResourceMetrics metrics = latestMetrics.get();
ResourceStatus status = currentStatus.get();
// Simple linear reduction based on memory and CPU load
double capacityFactor =
switch (status) {
case OK -> 1.0;
case WARNING -> 0.6;
case CRITICAL -> 0.3;
};
// Apply additional reduction based on specific memory pressure
if (metrics.memoryUsage > 0.8) {
capacityFactor *= 0.5; // Further reduce capacity under memory pressure
}
// Calculate capacity with minimum safeguard
int capacity = (int) Math.max(minCapacity, Math.ceil(baseCapacity * capacityFactor));
        log.debug(
                "Dynamic queue capacity: {} (base: {}, factor: {}, status: {})",
                capacity,
                baseCapacity,
                String.format("%.2f", capacityFactor),
                status);
return capacity;
}
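    // Worked example for calculateDynamicQueueCapacity above: with baseCapacity=10 and
    // minCapacity=2, an OK status gives max(2, ceil(10 * 1.0)) = 10, WARNING gives
    // max(2, ceil(10 * 0.6)) = 6, and CRITICAL combined with memoryUsage > 0.8 gives
    // max(2, ceil(10 * 0.15)) = 2.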
/**
* Checks if a job with the given weight can be executed immediately or should be queued based
* on current resource availability.
*
* @param resourceWeight The resource weight of the job (1-100)
* @return true if the job should be queued, false if it can run immediately
*/
public boolean shouldQueueJob(int resourceWeight) {
ResourceStatus status = currentStatus.get();
// Always run lightweight jobs (weight < 20) unless critical
if (resourceWeight < 20 && status != ResourceStatus.CRITICAL) {
return false;
}
// Medium weight jobs run immediately if resources are OK
if (resourceWeight < 60 && status == ResourceStatus.OK) {
return false;
}
// Heavy jobs (weight >= 60) and any job during WARNING/CRITICAL should be queued
return true;
}
}
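Downstream components mostly consume ResourceMonitor through shouldQueueJob and the cached metrics. A minimal sketch, assuming constructor injection; the ResourceAwareDispatcher class and the 120-second staleness threshold are illustrative choices, not part of this codebase.

import org.springframework.stereotype.Component;

import stirling.software.common.service.ResourceMonitor;

@Component
class ResourceAwareDispatcher {

    private final ResourceMonitor resourceMonitor;

    ResourceAwareDispatcher(ResourceMonitor resourceMonitor) {
        this.resourceMonitor = resourceMonitor;
    }

    boolean canRunImmediately(int resourceWeight) {
        // Per shouldQueueJob: weights below 20 run unless status is CRITICAL, weights below 60
        // run only while status is OK, and anything heavier is always queued.
        if (resourceMonitor.shouldQueueJob(resourceWeight)) {
            return false; // hand the work to JobQueue instead
        }
        ResourceMonitor.ResourceMetrics metrics = resourceMonitor.getLatestMetrics().get();
        // Treat metrics older than two default monitoring intervals as stale (illustrative).
        return !metrics.isStale(120_000);
    }
}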

View File

@ -1,208 +0,0 @@
package stirling.software.common.service;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.regex.Pattern;
import org.springframework.stereotype.Service;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
@Service
@RequiredArgsConstructor
@Slf4j
public class SsrfProtectionService {
private final ApplicationProperties applicationProperties;
private static final Pattern DATA_URL_PATTERN =
Pattern.compile("^data:.*", Pattern.CASE_INSENSITIVE);
private static final Pattern FRAGMENT_PATTERN = Pattern.compile("^#.*");
public enum SsrfProtectionLevel {
OFF, // No SSRF protection - allows all URLs
MEDIUM, // Block internal networks but allow external URLs
MAX // Block all external URLs - only data: and fragments
}
public boolean isUrlAllowed(String url) {
ApplicationProperties.Html.UrlSecurity config =
applicationProperties.getSystem().getHtml().getUrlSecurity();
if (!config.isEnabled()) {
return true;
}
if (url == null || url.trim().isEmpty()) {
return false;
}
String trimmedUrl = url.trim();
// Always allow data URLs and fragments
if (DATA_URL_PATTERN.matcher(trimmedUrl).matches()
|| FRAGMENT_PATTERN.matcher(trimmedUrl).matches()) {
return true;
}
SsrfProtectionLevel level = parseProtectionLevel(config.getLevel());
switch (level) {
case OFF:
return true;
case MAX:
return isMaxSecurityAllowed(trimmedUrl, config);
case MEDIUM:
return isMediumSecurityAllowed(trimmedUrl, config);
default:
return false;
}
}
private SsrfProtectionLevel parseProtectionLevel(String level) {
try {
return SsrfProtectionLevel.valueOf(level.toUpperCase());
} catch (IllegalArgumentException e) {
log.warn("Invalid SSRF protection level '{}', defaulting to MEDIUM", level);
return SsrfProtectionLevel.MEDIUM;
}
}
private boolean isMaxSecurityAllowed(
String url, ApplicationProperties.Html.UrlSecurity config) {
// MAX security: only allow explicitly whitelisted domains
try {
URI uri = new URI(url);
String host = uri.getHost();
if (host == null) {
return false;
}
return config.getAllowedDomains().contains(host.toLowerCase());
} catch (Exception e) {
log.debug("Failed to parse URL for MAX security check: {}", url, e);
return false;
}
}
private boolean isMediumSecurityAllowed(
String url, ApplicationProperties.Html.UrlSecurity config) {
try {
URI uri = new URI(url);
String host = uri.getHost();
if (host == null) {
return false;
}
String hostLower = host.toLowerCase();
// Check explicit blocked domains
if (config.getBlockedDomains().contains(hostLower)) {
log.debug("URL blocked by explicit domain blocklist: {}", url);
return false;
}
// Check internal TLD patterns
for (String tld : config.getInternalTlds()) {
if (hostLower.endsWith(tld.toLowerCase())) {
log.debug("URL blocked by internal TLD pattern '{}': {}", tld, url);
return false;
}
}
// If allowedDomains is specified, only allow those
if (!config.getAllowedDomains().isEmpty()) {
boolean isAllowed =
config.getAllowedDomains().stream()
.anyMatch(
domain ->
hostLower.equals(domain.toLowerCase())
|| hostLower.endsWith(
"." + domain.toLowerCase()));
if (!isAllowed) {
log.debug("URL not in allowed domains list: {}", url);
return false;
}
}
// Resolve hostname to IP address for network-based checks
try {
InetAddress address = InetAddress.getByName(host);
if (config.isBlockPrivateNetworks() && isPrivateAddress(address)) {
log.debug("URL blocked - private network address: {}", url);
return false;
}
if (config.isBlockLocalhost() && address.isLoopbackAddress()) {
log.debug("URL blocked - localhost address: {}", url);
return false;
}
if (config.isBlockLinkLocal() && address.isLinkLocalAddress()) {
log.debug("URL blocked - link-local address: {}", url);
return false;
}
if (config.isBlockCloudMetadata()
&& isCloudMetadataAddress(address.getHostAddress())) {
log.debug("URL blocked - cloud metadata endpoint: {}", url);
return false;
}
} catch (UnknownHostException e) {
log.debug("Failed to resolve hostname for SSRF check: {}", host, e);
return false;
}
return true;
} catch (Exception e) {
log.debug("Failed to parse URL for MEDIUM security check: {}", url, e);
return false;
}
}
private boolean isPrivateAddress(InetAddress address) {
return address.isSiteLocalAddress()
|| address.isAnyLocalAddress()
|| isPrivateIPv4Range(address.getHostAddress());
}
private boolean isPrivateIPv4Range(String ip) {
return ip.startsWith("10.")
|| ip.startsWith("192.168.")
|| (ip.startsWith("172.") && isInRange172(ip))
|| ip.startsWith("127.")
|| "0.0.0.0".equals(ip);
}
private boolean isInRange172(String ip) {
String[] parts = ip.split("\\.");
if (parts.length >= 2) {
try {
int secondOctet = Integer.parseInt(parts[1]);
return secondOctet >= 16 && secondOctet <= 31;
} catch (NumberFormatException e) {
return false;
}
}
return false;
}
private boolean isCloudMetadataAddress(String ip) {
// Cloud metadata endpoints for AWS, GCP, Azure, Oracle Cloud, and IBM Cloud
return ip.startsWith("169.254.169.254") // AWS/GCP/Azure
|| ip.startsWith("fd00:ec2::254") // AWS IPv6
|| ip.startsWith("169.254.169.253") // Oracle Cloud
|| ip.startsWith("169.254.169.250"); // IBM Cloud
}
}
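A typical consumer checks isUrlAllowed before following links embedded in user-supplied HTML. A minimal sketch, assuming constructor injection; the HtmlUrlFilter class and the decision to rewrite blocked links to "#" are illustrative.

import org.springframework.stereotype.Component;

import stirling.software.common.service.SsrfProtectionService;

@Component
class HtmlUrlFilter {

    private final SsrfProtectionService ssrfProtectionService;

    HtmlUrlFilter(SsrfProtectionService ssrfProtectionService) {
        this.ssrfProtectionService = ssrfProtectionService;
    }

    String sanitizeSrc(String url) {
        // data: URLs and fragments always pass; under MEDIUM protection, hosts that resolve to
        // private, loopback, link-local, or cloud-metadata addresses are rejected, subject to
        // the configured block flags.
        return ssrfProtectionService.isUrlAllowed(url) ? url : "#";
    }
}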

View File

@ -1,466 +0,0 @@
package stirling.software.common.service;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import io.github.pixee.security.ZipSecurity;
import jakarta.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.job.JobResult;
import stirling.software.common.model.job.JobStats;
import stirling.software.common.model.job.ResultFile;
/** Manages async tasks and their results */
@Service
@Slf4j
public class TaskManager {
private final Map<String, JobResult> jobResults = new ConcurrentHashMap<>();
@Value("${stirling.jobResultExpiryMinutes:30}")
private int jobResultExpiryMinutes = 30;
private final FileStorage fileStorage;
private final ScheduledExecutorService cleanupExecutor =
Executors.newSingleThreadScheduledExecutor();
/** Initialize the task manager and start the cleanup scheduler */
public TaskManager(FileStorage fileStorage) {
this.fileStorage = fileStorage;
// Schedule periodic cleanup of old job results
cleanupExecutor.scheduleAtFixedRate(
this::cleanupOldJobs,
10, // Initial delay
10, // Interval
TimeUnit.MINUTES);
log.debug(
"Task manager initialized with job result expiry of {} minutes",
jobResultExpiryMinutes);
}
/**
* Create a new task with the given job ID
*
* @param jobId The job ID
*/
public void createTask(String jobId) {
jobResults.put(jobId, JobResult.createNew(jobId));
log.debug("Created task with job ID: {}", jobId);
}
/**
* Set the result of a task as a general object
*
* @param jobId The job ID
* @param result The result object
*/
public void setResult(String jobId, Object result) {
JobResult jobResult = getOrCreateJobResult(jobId);
jobResult.completeWithResult(result);
log.debug("Set result for job ID: {}", jobId);
}
/**
* Set the result of a task as a file
*
* @param jobId The job ID
* @param fileId The file ID
* @param originalFileName The original file name
* @param contentType The content type of the file
*/
public void setFileResult(
String jobId, String fileId, String originalFileName, String contentType) {
JobResult jobResult = getOrCreateJobResult(jobId);
// Check if this is a ZIP file that should be extracted
if (isZipFile(contentType, originalFileName)) {
try {
List<ResultFile> extractedFiles =
extractZipToIndividualFiles(fileId, originalFileName);
if (!extractedFiles.isEmpty()) {
jobResult.completeWithFiles(extractedFiles);
log.debug(
"Set multiple file results for job ID: {} with {} files extracted from ZIP",
jobId,
extractedFiles.size());
return;
}
} catch (Exception e) {
log.warn(
"Failed to extract ZIP file for job {}: {}. Falling back to single file result.",
jobId,
e.getMessage());
}
}
// Handle as single file using new ResultFile approach
try {
long fileSize = fileStorage.getFileSize(fileId);
jobResult.completeWithSingleFile(fileId, originalFileName, contentType, fileSize);
log.debug("Set single file result for job ID: {} with file ID: {}", jobId, fileId);
} catch (Exception e) {
log.warn(
"Failed to get file size for job {}: {}. Using size 0.", jobId, e.getMessage());
jobResult.completeWithSingleFile(fileId, originalFileName, contentType, 0);
}
}
/**
* Set the result of a task as multiple files
*
* @param jobId The job ID
* @param resultFiles The list of result files
*/
public void setMultipleFileResults(String jobId, List<ResultFile> resultFiles) {
JobResult jobResult = getOrCreateJobResult(jobId);
jobResult.completeWithFiles(resultFiles);
log.debug(
"Set multiple file results for job ID: {} with {} files",
jobId,
resultFiles.size());
}
/**
* Set an error for a task
*
* @param jobId The job ID
* @param error The error message
*/
public void setError(String jobId, String error) {
JobResult jobResult = getOrCreateJobResult(jobId);
jobResult.failWithError(error);
log.debug("Set error for job ID: {}: {}", jobId, error);
}
/**
* Mark a task as complete
*
* @param jobId The job ID
*/
public void setComplete(String jobId) {
JobResult jobResult = getOrCreateJobResult(jobId);
if (jobResult.getResult() == null
&& !jobResult.hasFiles()
&& jobResult.getError() == null) {
// If no result or error has been set, mark it as complete with an empty result
jobResult.completeWithResult("Task completed successfully");
}
log.debug("Marked job ID: {} as complete", jobId);
}
/**
* Check if a task is complete
*
* @param jobId The job ID
* @return true if the task is complete, false otherwise
*/
public boolean isComplete(String jobId) {
JobResult result = jobResults.get(jobId);
return result != null && result.isComplete();
}
/**
* Get the result of a task
*
* @param jobId The job ID
* @return The result object, or null if the task doesn't exist or is not complete
*/
public JobResult getJobResult(String jobId) {
return jobResults.get(jobId);
}
/**
* Add a note to a task. Notes are informational messages that can be attached to a job for
* tracking purposes.
*
* @param jobId The job ID
* @param note The note to add
* @return true if the note was added successfully, false if the job doesn't exist
*/
public boolean addNote(String jobId, String note) {
JobResult jobResult = jobResults.get(jobId);
if (jobResult != null) {
jobResult.addNote(note);
log.debug("Added note to job ID: {}: {}", jobId, note);
return true;
}
log.warn("Attempted to add note to non-existent job ID: {}", jobId);
return false;
}
/**
* Get statistics about all jobs in the system
*
* @return Job statistics
*/
public JobStats getJobStats() {
int totalJobs = jobResults.size();
int activeJobs = 0;
int completedJobs = 0;
int failedJobs = 0;
int successfulJobs = 0;
int fileResultJobs = 0;
LocalDateTime oldestActiveJobTime = null;
LocalDateTime newestActiveJobTime = null;
long totalProcessingTimeMs = 0;
for (JobResult result : jobResults.values()) {
if (result.isComplete()) {
completedJobs++;
// Calculate processing time for completed jobs
if (result.getCreatedAt() != null && result.getCompletedAt() != null) {
long processingTimeMs =
java.time.Duration.between(
result.getCreatedAt(), result.getCompletedAt())
.toMillis();
totalProcessingTimeMs += processingTimeMs;
}
if (result.getError() != null) {
failedJobs++;
} else {
successfulJobs++;
if (result.hasFiles()) {
fileResultJobs++;
}
}
} else {
activeJobs++;
// Track oldest and newest active jobs
if (result.getCreatedAt() != null) {
if (oldestActiveJobTime == null
|| result.getCreatedAt().isBefore(oldestActiveJobTime)) {
oldestActiveJobTime = result.getCreatedAt();
}
if (newestActiveJobTime == null
|| result.getCreatedAt().isAfter(newestActiveJobTime)) {
newestActiveJobTime = result.getCreatedAt();
}
}
}
}
// Calculate average processing time
long averageProcessingTimeMs =
completedJobs > 0 ? totalProcessingTimeMs / completedJobs : 0;
return JobStats.builder()
.totalJobs(totalJobs)
.activeJobs(activeJobs)
.completedJobs(completedJobs)
.failedJobs(failedJobs)
.successfulJobs(successfulJobs)
.fileResultJobs(fileResultJobs)
.oldestActiveJobTime(oldestActiveJobTime)
.newestActiveJobTime(newestActiveJobTime)
.averageProcessingTimeMs(averageProcessingTimeMs)
.build();
}
/**
* Get or create a job result
*
* @param jobId The job ID
* @return The job result
*/
private JobResult getOrCreateJobResult(String jobId) {
return jobResults.computeIfAbsent(jobId, JobResult::createNew);
}
/** Clean up old completed job results */
public void cleanupOldJobs() {
LocalDateTime expiryThreshold =
LocalDateTime.now().minus(jobResultExpiryMinutes, ChronoUnit.MINUTES);
int removedCount = 0;
try {
for (Map.Entry<String, JobResult> entry : jobResults.entrySet()) {
JobResult result = entry.getValue();
// Remove completed jobs that are older than the expiry threshold
if (result.isComplete()
&& result.getCompletedAt() != null
&& result.getCompletedAt().isBefore(expiryThreshold)) {
// Clean up file results
cleanupJobFiles(result, entry.getKey());
// Remove the job result
jobResults.remove(entry.getKey());
removedCount++;
}
}
if (removedCount > 0) {
log.info("Cleaned up {} expired job results", removedCount);
}
} catch (Exception e) {
log.error("Error during job cleanup: {}", e.getMessage(), e);
}
}
/** Shutdown the cleanup executor */
@PreDestroy
public void shutdown() {
try {
log.info("Shutting down job result cleanup executor");
cleanupExecutor.shutdown();
if (!cleanupExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
cleanupExecutor.shutdownNow();
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
cleanupExecutor.shutdownNow();
}
}
/** Check if a file is a ZIP file based on content type and filename */
private boolean isZipFile(String contentType, String fileName) {
if (contentType != null
&& (contentType.equals("application/zip")
|| contentType.equals("application/x-zip-compressed"))) {
return true;
}
if (fileName != null && fileName.toLowerCase().endsWith(".zip")) {
return true;
}
return false;
}
/** Extract a ZIP file into individual files and store them */
private List<ResultFile> extractZipToIndividualFiles(
String zipFileId, String originalZipFileName) throws IOException {
List<ResultFile> extractedFiles = new ArrayList<>();
MultipartFile zipFile = fileStorage.retrieveFile(zipFileId);
try (ZipInputStream zipIn =
ZipSecurity.createHardenedInputStream(
new ByteArrayInputStream(zipFile.getBytes()))) {
ZipEntry entry;
while ((entry = zipIn.getNextEntry()) != null) {
if (!entry.isDirectory()) {
// Use buffered reading for memory safety
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = zipIn.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
byte[] fileContent = out.toByteArray();
String contentType = determineContentType(entry.getName());
String individualFileId = fileStorage.storeBytes(fileContent, entry.getName());
ResultFile resultFile =
ResultFile.builder()
.fileId(individualFileId)
.fileName(entry.getName())
.contentType(contentType)
.fileSize(fileContent.length)
.build();
extractedFiles.add(resultFile);
log.debug(
"Extracted file: {} (size: {} bytes)",
entry.getName(),
fileContent.length);
}
zipIn.closeEntry();
}
}
// Clean up the original ZIP file after extraction
try {
fileStorage.deleteFile(zipFileId);
log.debug("Cleaned up original ZIP file: {}", zipFileId);
} catch (Exception e) {
log.warn("Failed to clean up original ZIP file {}: {}", zipFileId, e.getMessage());
}
return extractedFiles;
}
/** Determine content type based on file extension */
private String determineContentType(String fileName) {
if (fileName == null) {
return MediaType.APPLICATION_OCTET_STREAM_VALUE;
}
String lowerName = fileName.toLowerCase();
if (lowerName.endsWith(".pdf")) {
return MediaType.APPLICATION_PDF_VALUE;
} else if (lowerName.endsWith(".txt")) {
return MediaType.TEXT_PLAIN_VALUE;
} else if (lowerName.endsWith(".json")) {
return MediaType.APPLICATION_JSON_VALUE;
} else if (lowerName.endsWith(".xml")) {
return MediaType.APPLICATION_XML_VALUE;
} else if (lowerName.endsWith(".jpg") || lowerName.endsWith(".jpeg")) {
return MediaType.IMAGE_JPEG_VALUE;
} else if (lowerName.endsWith(".png")) {
return MediaType.IMAGE_PNG_VALUE;
} else {
return MediaType.APPLICATION_OCTET_STREAM_VALUE;
}
}
/** Clean up files associated with a job result */
private void cleanupJobFiles(JobResult result, String jobId) {
// Clean up all result files
if (result.hasFiles()) {
for (ResultFile resultFile : result.getAllResultFiles()) {
try {
fileStorage.deleteFile(resultFile.getFileId());
} catch (Exception e) {
log.warn(
"Failed to delete file {} for job {}: {}",
resultFile.getFileId(),
jobId,
e.getMessage());
}
}
}
}
/** Find the ResultFile metadata for a given file ID by searching through all job results */
public ResultFile findResultFileByFileId(String fileId) {
for (JobResult jobResult : jobResults.values()) {
if (jobResult.hasFiles()) {
for (ResultFile resultFile : jobResult.getAllResultFiles()) {
if (fileId.equals(resultFile.getFileId())) {
return resultFile;
}
}
}
}
return null;
}
}
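The runner above records results through TaskManager rather than returning them directly, and other callers can do the same. A minimal sketch, assuming constructor injection; the TaskManagerUsageExample class, job ID, and file ID are illustrative.

import org.springframework.stereotype.Component;

import stirling.software.common.service.TaskManager;

@Component
class TaskManagerUsageExample {

    private final TaskManager taskManager;

    TaskManagerUsageExample(TaskManager taskManager) {
        this.taskManager = taskManager;
    }

    void recordPdfResult(String jobId, String fileId) {
        taskManager.createTask(jobId);
        // A ZIP result would be unpacked into individual ResultFile entries; a plain PDF is
        // kept as a single file result with its size looked up from FileStorage.
        taskManager.setFileResult(jobId, fileId, "result.pdf", "application/pdf");
        taskManager.setComplete(jobId);
    }

    boolean isDone(String jobId) {
        return taskManager.isComplete(jobId);
    }
}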

View File

@ -1,449 +0,0 @@
package stirling.software.common.service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.util.GeneralUtils;
import stirling.software.common.util.TempFileManager;
import stirling.software.common.util.TempFileRegistry;
/**
* Service to periodically clean up temporary files. Runs scheduled tasks to delete old temp files
* and directories.
*/
@Slf4j
@Service
@RequiredArgsConstructor
public class TempFileCleanupService {
private final TempFileRegistry registry;
private final TempFileManager tempFileManager;
private final ApplicationProperties applicationProperties;
@Autowired
@Qualifier("machineType")
private String machineType;
// Maximum recursion depth for directory traversal
private static final int MAX_RECURSION_DEPTH = 5;
// File patterns that identify our temp files
private static final Predicate<String> IS_OUR_TEMP_FILE =
fileName ->
fileName.startsWith("stirling-pdf-")
|| fileName.startsWith("output_")
|| fileName.startsWith("compressedPDF")
|| fileName.startsWith("pdf-save-")
|| fileName.startsWith("pdf-stream-")
|| fileName.startsWith("PDFBox")
|| fileName.startsWith("input_")
|| fileName.startsWith("overlay-");
// File patterns that identify common system temp files
private static final Predicate<String> IS_SYSTEM_TEMP_FILE =
fileName ->
fileName.matches("lu\\d+[a-z0-9]*\\.tmp")
|| fileName.matches("ocr_process\\d+")
|| (fileName.startsWith("tmp") && !fileName.contains("jetty"))
|| fileName.startsWith("OSL_PIPE_")
|| (fileName.endsWith(".tmp") && !fileName.contains("jetty"));
// File patterns that should be excluded from cleanup
private static final Predicate<String> SHOULD_SKIP =
fileName ->
fileName.contains("jetty")
|| fileName.startsWith("jetty-")
|| "proc".equals(fileName)
|| "sys".equals(fileName)
|| "dev".equals(fileName)
|| "hsperfdata_stirlingpdfuser".equals(fileName)
|| fileName.startsWith("hsperfdata_")
|| ".pdfbox.cache".equals(fileName);
@PostConstruct
public void init() {
// Create necessary directories
ensureDirectoriesExist();
// Perform startup cleanup if enabled
if (applicationProperties.getSystem().getTempFileManagement().isStartupCleanup()) {
runStartupCleanup();
}
}
/** Ensure that all required temp directories exist */
private void ensureDirectoriesExist() {
try {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
// Create the main temp directory
String customTempDirectory = tempFiles.getBaseTmpDir();
if (customTempDirectory != null && !customTempDirectory.isEmpty()) {
Path tempDir = Path.of(customTempDirectory);
if (!Files.exists(tempDir)) {
Files.createDirectories(tempDir);
log.info("Created temp directory: {}", tempDir);
}
}
// Create LibreOffice temp directory
String libreOfficeTempDir = tempFiles.getLibreofficeDir();
if (libreOfficeTempDir != null && !libreOfficeTempDir.isEmpty()) {
Path loTempDir = Path.of(libreOfficeTempDir);
if (!Files.exists(loTempDir)) {
Files.createDirectories(loTempDir);
log.info("Created LibreOffice temp directory: {}", loTempDir);
}
}
} catch (IOException e) {
log.error("Error creating temp directories", e);
}
}
/** Scheduled task to clean up old temporary files. Runs at the configured interval. */
@Scheduled(
fixedDelayString =
"#{applicationProperties.system.tempFileManagement.cleanupIntervalMinutes}",
timeUnit = TimeUnit.MINUTES)
public void scheduledCleanup() {
log.info("Running scheduled temporary file cleanup");
long maxAgeMillis = tempFileManager.getMaxAgeMillis();
// Clean up registered temp files (managed by TempFileRegistry)
int registeredDeletedCount = tempFileManager.cleanupOldTempFiles(maxAgeMillis);
log.info("Cleaned up {} registered temporary files", registeredDeletedCount);
// Clean up registered temp directories
int directoriesDeletedCount = 0;
for (Path directory : registry.getTempDirectories()) {
try {
if (Files.exists(directory)) {
GeneralUtils.deleteDirectory(directory);
directoriesDeletedCount++;
log.debug("Cleaned up temporary directory: {}", directory);
}
} catch (IOException e) {
log.warn("Failed to clean up temporary directory: {}", directory, e);
}
}
// Clean up PDFBox cache file
cleanupPDFBoxCache();
// Clean up unregistered temp files based on our cleanup strategy
boolean containerMode = isContainerMode();
int unregisteredDeletedCount = cleanupUnregisteredFiles(containerMode, true, maxAgeMillis);
if (registeredDeletedCount > 0
|| unregisteredDeletedCount > 0
|| directoriesDeletedCount > 0) {
log.info(
"Scheduled cleanup complete. Deleted {} registered files, {} unregistered files, {} directories",
registeredDeletedCount,
unregisteredDeletedCount,
directoriesDeletedCount);
}
}
/**
* Perform startup cleanup of stale temporary files from previous runs. This is especially
* important in Docker environments where temp files persist between container restarts.
*/
private void runStartupCleanup() {
boolean containerMode = isContainerMode();
log.info(
"Running in {} mode, using {} cleanup strategy",
machineType,
containerMode ? "aggressive" : "conservative");
// For startup cleanup, we use a longer timeout for non-container environments
long maxAgeMillis = containerMode ? 0 : 24 * 60 * 60 * 1000; // 0 or 24 hours
int totalDeletedCount = cleanupUnregisteredFiles(containerMode, false, maxAgeMillis);
log.info(
"Startup cleanup complete. Deleted {} temporary files/directories",
totalDeletedCount);
}
/**
* Clean up unregistered temporary files across all configured temp directories.
*
* @param containerMode Whether we're in container mode (more aggressive cleanup)
* @param isScheduled Whether this is a scheduled cleanup or startup cleanup
* @param maxAgeMillis Maximum age of files to clean in milliseconds
* @return Number of files deleted
*/
private int cleanupUnregisteredFiles(
boolean containerMode, boolean isScheduled, long maxAgeMillis) {
AtomicInteger totalDeletedCount = new AtomicInteger(0);
try {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
Path[] dirsToScan;
if (tempFiles.isCleanupSystemTemp()
&& tempFiles.getSystemTempDir() != null
&& !tempFiles.getSystemTempDir().isEmpty()) {
Path systemTempPath = getSystemTempPath();
dirsToScan =
new Path[] {
systemTempPath,
Path.of(tempFiles.getBaseTmpDir()),
Path.of(tempFiles.getLibreofficeDir())
};
} else {
dirsToScan =
new Path[] {
Path.of(tempFiles.getBaseTmpDir()),
Path.of(tempFiles.getLibreofficeDir())
};
}
// Process each directory
Arrays.stream(dirsToScan)
.filter(Files::exists)
.forEach(
tempDir -> {
try {
String phase = isScheduled ? "scheduled" : "startup";
log.debug(
"Scanning directory for {} cleanup: {}",
phase,
tempDir);
AtomicInteger dirDeletedCount = new AtomicInteger(0);
cleanupDirectoryStreaming(
tempDir,
containerMode,
0,
maxAgeMillis,
isScheduled,
path -> {
dirDeletedCount.incrementAndGet();
if (log.isDebugEnabled()) {
log.debug(
"Deleted temp file during {} cleanup: {}",
phase,
path);
}
});
int count = dirDeletedCount.get();
totalDeletedCount.addAndGet(count);
if (count > 0) {
log.info(
"Cleaned up {} files/directories in {}",
count,
tempDir);
}
} catch (IOException e) {
log.error("Error during cleanup of directory: {}", tempDir, e);
}
});
} catch (Exception e) {
log.error("Error during cleanup of unregistered files", e);
}
return totalDeletedCount.get();
}
/** Get the system temp directory path based on configuration or system property. */
private Path getSystemTempPath() {
String systemTempDir =
applicationProperties.getSystem().getTempFileManagement().getSystemTempDir();
if (systemTempDir != null && !systemTempDir.isEmpty()) {
return Path.of(systemTempDir);
} else {
return Path.of(System.getProperty("java.io.tmpdir"));
}
}
/** Determine if we're running in a container environment. */
private boolean isContainerMode() {
return "Docker".equals(machineType) || "Kubernetes".equals(machineType);
}
/**
* Recursively clean up a directory using a streaming approach to reduce memory usage.
*
* @param directory The directory to clean
* @param containerMode Whether we're in container mode (more aggressive cleanup)
* @param depth Current recursion depth
* @param maxAgeMillis Maximum age of files to delete
* @param isScheduled Whether this is a scheduled cleanup (vs startup)
* @param onDeleteCallback Callback function when a file is deleted
* @throws IOException If an I/O error occurs
*/
private void cleanupDirectoryStreaming(
Path directory,
boolean containerMode,
int depth,
long maxAgeMillis,
boolean isScheduled,
Consumer<Path> onDeleteCallback)
throws IOException {
if (depth > MAX_RECURSION_DEPTH) {
log.debug("Maximum directory recursion depth reached for: {}", directory);
return;
}
java.util.List<Path> subdirectories = new java.util.ArrayList<>();
try (Stream<Path> pathStream = Files.list(directory)) {
pathStream.forEach(
path -> {
try {
String fileName = path.getFileName().toString();
if (SHOULD_SKIP.test(fileName)) {
return;
}
if (Files.isDirectory(path)) {
subdirectories.add(path);
return;
}
if (registry.contains(path.toFile())) {
return;
}
if (shouldDeleteFile(path, fileName, containerMode, maxAgeMillis)) {
try {
Files.deleteIfExists(path);
onDeleteCallback.accept(path);
} catch (IOException e) {
if (e.getMessage() != null
&& e.getMessage()
.contains("being used by another process")) {
log.debug("File locked, skipping delete: {}", path);
} else {
log.warn("Failed to delete temp file: {}", path, e);
}
}
}
} catch (Exception e) {
log.warn("Error processing path: {}", path, e);
}
});
}
for (Path subdirectory : subdirectories) {
try {
cleanupDirectoryStreaming(
subdirectory,
containerMode,
depth + 1,
maxAgeMillis,
isScheduled,
onDeleteCallback);
} catch (IOException e) {
log.warn("Error processing subdirectory: {}", subdirectory, e);
}
}
}
/** Determine if a file should be deleted based on its name, age, and other criteria. */
private boolean shouldDeleteFile(
Path path, String fileName, boolean containerMode, long maxAgeMillis) {
// First check if it matches our known temp file patterns
boolean isOurTempFile = IS_OUR_TEMP_FILE.test(fileName);
boolean isSystemTempFile = IS_SYSTEM_TEMP_FILE.test(fileName);
// Normal operation - check against temp file patterns
boolean shouldDelete = isOurTempFile || (containerMode && isSystemTempFile);
// Get file info for age checks
long lastModified = 0;
long currentTime = System.currentTimeMillis();
boolean isEmptyFile = false;
try {
lastModified = Files.getLastModifiedTime(path).toMillis();
// Special case for zero-byte files - these are often corrupted temp files
if (Files.size(path) == 0) {
isEmptyFile = true;
// Use a shorter timeout for empty files: delete them once they are older than 5 minutes
if ((currentTime - lastModified) > 5 * 60 * 1000) {
shouldDelete = true;
}
}
} catch (IOException e) {
log.debug("Could not check file info, skipping: {}", path);
}
// Check file age against maxAgeMillis only if it's not an empty file that we've already
// decided to delete
if (!isEmptyFile && shouldDelete && maxAgeMillis > 0) {
// In normal mode, check age against maxAgeMillis
shouldDelete = (currentTime - lastModified) > maxAgeMillis;
}
return shouldDelete;
}
/** Clean up LibreOffice temporary files. This method is called after LibreOffice operations. */
public void cleanupLibreOfficeTempFiles() {
// Cleanup known LibreOffice temp directories
try {
Set<Path> directories = registry.getTempDirectories();
for (Path dir : directories) {
if (dir.getFileName().toString().contains("libreoffice") && Files.exists(dir)) {
// For directories containing "libreoffice", delete all contents
// but keep the directory itself for future use
cleanupDirectoryStreaming(
dir,
isContainerMode(),
0,
0, // age doesn't matter for LibreOffice cleanup
false,
path -> log.debug("Cleaned up LibreOffice temp file: {}", path));
log.debug("Cleaned up LibreOffice temp directory contents: {}", dir);
}
}
} catch (IOException e) {
log.warn("Failed to clean up LibreOffice temp files", e);
}
}
/**
* Clean up PDFBox cache file from user home directory. This cache file can grow large and
* should be periodically cleaned.
*/
private void cleanupPDFBoxCache() {
try {
Path userHome = Path.of(System.getProperty("user.home"));
Path pdfboxCache = userHome.resolve(".pdfbox.cache");
if (Files.exists(pdfboxCache)) {
Files.deleteIfExists(pdfboxCache);
log.debug("Cleaned up PDFBox cache file: {}", pdfboxCache);
}
} catch (IOException e) {
log.warn("Failed to clean up PDFBox cache file", e);
}
}
}
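
// Standalone sketch (illustrative, not part of the original service) restating the retention rule
// applied by shouldDeleteFile above: zero-byte files are treated as stale after five minutes,
// while other matching temp files are only removed once they exceed maxAgeMillis. Names here are
// hypothetical and exist only for this example.
class RetentionRuleSketch {
    static boolean isExpired(long lastModifiedMillis, long sizeBytes, long maxAgeMillis, long nowMillis) {
        long age = nowMillis - lastModifiedMillis;
        if (sizeBytes == 0) {
            return age > 5 * 60 * 1000L; // empty files are usually corrupted temp files
        }
        return maxAgeMillis > 0 && age > maxAgeMillis;
    }
}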

View File

@ -1,76 +0,0 @@
package stirling.software.common.util;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
* Helper class that provides access to the ApplicationContext. Useful for getting beans in classes
* that are not managed by Spring.
*/
@Component
public class ApplicationContextProvider implements ApplicationContextAware {
private static ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
applicationContext = context;
}
/**
* Get a bean by class type.
*
* @param <T> The type of the bean
* @param beanClass The class of the bean
* @return The bean instance, or null if not found
*/
public static <T> T getBean(Class<T> beanClass) {
if (applicationContext == null) {
return null;
}
try {
return applicationContext.getBean(beanClass);
} catch (BeansException e) {
return null;
}
}
/**
* Get a bean by name and class type.
*
* @param <T> The type of the bean
* @param name The name of the bean
* @param beanClass The class of the bean
* @return The bean instance, or null if not found
*/
public static <T> T getBean(String name, Class<T> beanClass) {
if (applicationContext == null) {
return null;
}
try {
return applicationContext.getBean(name, beanClass);
} catch (BeansException e) {
return null;
}
}
/**
* Check if a bean of the specified type exists.
*
* @param beanClass The class of the bean
* @return true if the bean exists, false otherwise
*/
public static boolean containsBean(Class<?> beanClass) {
if (applicationContext == null) {
return false;
}
try {
applicationContext.getBean(beanClass);
return true;
} catch (BeansException e) {
return false;
}
}
}
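
// Usage sketch (illustrative, not from the original sources): a class that is not managed by
// Spring looking up a bean through ApplicationContextProvider. TempFileManager is used here only
// because it appears elsewhere in this module; any bean type would do.
class ContextLookupSketch {
    static TempFileManager currentTempFileManager() {
        // Returns null if the context is not initialized yet or the bean is absent.
        return ApplicationContextProvider.getBean(TempFileManager.class);
    }
}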

View File

@ -1,50 +0,0 @@
package stirling.software.common.util;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentCatalog;
import org.apache.pdfbox.pdmodel.PageMode;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class AttachmentUtils {
/**
* Sets the PDF catalog viewer preferences to display attachments in the viewer.
*
* @param document The <code>PDDocument</code> to modify.
* @param pageMode The <code>PageMode</code> to set for the PDF viewer. <code>PageMode</code>
* values: <code>UseNone</code>, <code>UseOutlines</code>, <code>UseThumbs</code>, <code>
* FullScreen</code>, <code>UseOC</code>, <code>UseAttachments</code>.
*/
public static void setCatalogViewerPreferences(PDDocument document, PageMode pageMode) {
try {
PDDocumentCatalog catalog = document.getDocumentCatalog();
if (catalog != null) {
COSDictionary catalogDict = catalog.getCOSObject();
catalog.setPageMode(pageMode);
catalogDict.setName(COSName.PAGE_MODE, pageMode.stringValue());
COSDictionary viewerPrefs =
(COSDictionary) catalogDict.getDictionaryObject(COSName.VIEWER_PREFERENCES);
if (viewerPrefs == null) {
viewerPrefs = new COSDictionary();
catalogDict.setItem(COSName.VIEWER_PREFERENCES, viewerPrefs);
}
viewerPrefs.setName(
COSName.getPDFName("NonFullScreenPageMode"), pageMode.stringValue());
viewerPrefs.setBoolean(COSName.getPDFName("DisplayDocTitle"), true);
                log.info(
                        "Set PDF PageMode to {} in the catalog viewer preferences",
                        pageMode.stringValue());
}
} catch (Exception e) {
log.error("Failed to set catalog viewer preferences for attachments", e);
}
}
}
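
// Usage sketch (illustrative): opening the attachments pane by default for a document that has
// embedded files. Assumes PDFBox is on the classpath, as in the original sources.
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PageMode;

class AttachmentViewerSketch {
    static void showAttachmentsOnOpen(PDDocument document) {
        AttachmentUtils.setCatalogViewerPreferences(document, PageMode.USE_ATTACHMENTS);
    }
}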

View File

@ -1,71 +0,0 @@
package stirling.software.common.util;
import org.owasp.html.AttributePolicy;
import org.owasp.html.HtmlPolicyBuilder;
import org.owasp.html.PolicyFactory;
import org.owasp.html.Sanitizers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.service.SsrfProtectionService;
@Component
public class CustomHtmlSanitizer {
private final SsrfProtectionService ssrfProtectionService;
private final ApplicationProperties applicationProperties;
@Autowired
public CustomHtmlSanitizer(
SsrfProtectionService ssrfProtectionService,
ApplicationProperties applicationProperties) {
this.ssrfProtectionService = ssrfProtectionService;
this.applicationProperties = applicationProperties;
}
private final AttributePolicy SSRF_SAFE_URL_POLICY =
new AttributePolicy() {
@Override
public String apply(String elementName, String attributeName, String value) {
if (value == null || value.trim().isEmpty()) {
return null;
}
String trimmedValue = value.trim();
// Use the SSRF protection service to validate the URL
if (ssrfProtectionService != null
&& !ssrfProtectionService.isUrlAllowed(trimmedValue)) {
return null;
}
return trimmedValue;
}
};
private final PolicyFactory SSRF_SAFE_IMAGES_POLICY =
new HtmlPolicyBuilder()
.allowElements("img")
.allowAttributes("alt", "width", "height", "title")
.onElements("img")
.allowAttributes("src")
.matching(SSRF_SAFE_URL_POLICY)
.onElements("img")
.toFactory();
private final PolicyFactory POLICY =
Sanitizers.FORMATTING
.and(Sanitizers.BLOCKS)
.and(Sanitizers.STYLES)
.and(Sanitizers.LINKS)
.and(Sanitizers.TABLES)
.and(SSRF_SAFE_IMAGES_POLICY)
.and(new HtmlPolicyBuilder().disallowElements("noscript").toFactory());
public String sanitize(String html) {
boolean disableSanitize =
Boolean.TRUE.equals(applicationProperties.getSystem().getDisableSanitize());
return disableSanitize ? html : POLICY.sanitize(html);
}
}
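
// Usage sketch (illustrative): a caller that sanitizes untrusted HTML before rendering it. In the
// application the sanitizer bean is injected by Spring rather than constructed directly.
class HtmlRenderingSketch {
    private final CustomHtmlSanitizer sanitizer;

    HtmlRenderingSketch(CustomHtmlSanitizer sanitizer) {
        this.sanitizer = sanitizer;
    }

    String renderSafely(String untrustedHtml) {
        // Strips disallowed elements and SSRF-unsafe image URLs per the policy above.
        return sanitizer.sanitize(untrustedHtml);
    }
}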

View File

@ -1,327 +0,0 @@
package stirling.software.common.util;
import java.io.IOException;
import java.text.MessageFormat;
import lombok.extern.slf4j.Slf4j;
/**
* Utility class for handling exceptions with internationalized error messages. Provides consistent
* error handling and user-friendly messages across the application.
*/
@Slf4j
public class ExceptionUtils {
/**
* Create an IOException with internationalized message for PDF corruption.
*
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createPdfCorruptedException(Exception cause) {
return createPdfCorruptedException(null, cause);
}
/**
* Create an IOException with internationalized message for PDF corruption with context.
*
* @param context additional context (e.g., "during merge", "during image extraction")
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createPdfCorruptedException(String context, Exception cause) {
String message;
if (context != null && !context.isEmpty()) {
message =
String.format(
"Error %s: PDF file appears to be corrupted or damaged. Please try using the 'Repair PDF' feature first to fix the file before proceeding with this operation.",
context);
} else {
message =
"PDF file appears to be corrupted or damaged. Please try using the 'Repair PDF' feature first to fix the file before proceeding with this operation.";
}
return new IOException(message, cause);
}
/**
* Create an IOException with internationalized message for multiple corrupted PDFs.
*
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createMultiplePdfCorruptedException(Exception cause) {
String message =
"One or more PDF files appear to be corrupted or damaged. Please try using the 'Repair PDF' feature on each file first before attempting to merge them.";
return new IOException(message, cause);
}
/**
* Create an IOException with internationalized message for PDF encryption issues.
*
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createPdfEncryptionException(Exception cause) {
String message =
"The PDF appears to have corrupted encryption data. This can happen when the PDF was created with incompatible encryption methods. Please try using the 'Repair PDF' feature first, or contact the document creator for a new copy.";
return new IOException(message, cause);
}
/**
* Create an IOException with internationalized message for PDF password issues.
*
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createPdfPasswordException(Exception cause) {
String message =
"The PDF Document is passworded and either the password was not provided or was incorrect";
return new IOException(message, cause);
}
/**
* Create an IOException with internationalized message for file processing errors.
*
* @param operation the operation being performed (e.g., "merge", "split", "convert")
* @param cause the original exception
* @return IOException with user-friendly message
*/
public static IOException createFileProcessingException(String operation, Exception cause) {
String message =
String.format(
"An error occurred while processing the file during %s operation: %s",
operation, cause.getMessage());
return new IOException(message, cause);
}
/**
* Create a generic IOException with internationalized message.
*
* @param messageKey the i18n message key
* @param defaultMessage the default message if i18n is not available
* @param cause the original exception
* @param args optional arguments for the message
* @return IOException with user-friendly message
*/
public static IOException createIOException(
String messageKey, String defaultMessage, Exception cause, Object... args) {
String message = MessageFormat.format(defaultMessage, args);
return new IOException(message, cause);
}
/**
* Create a generic RuntimeException with internationalized message.
*
* @param messageKey the i18n message key
* @param defaultMessage the default message if i18n is not available
* @param cause the original exception
* @param args optional arguments for the message
* @return RuntimeException with user-friendly message
*/
public static RuntimeException createRuntimeException(
String messageKey, String defaultMessage, Exception cause, Object... args) {
String message = MessageFormat.format(defaultMessage, args);
return new RuntimeException(message, cause);
}
/**
* Create an IllegalArgumentException with internationalized message.
*
* @param messageKey the i18n message key
* @param defaultMessage the default message if i18n is not available
* @param args optional arguments for the message
* @return IllegalArgumentException with user-friendly message
*/
public static IllegalArgumentException createIllegalArgumentException(
String messageKey, String defaultMessage, Object... args) {
String message = MessageFormat.format(defaultMessage, args);
return new IllegalArgumentException(message);
}
/** Create file validation exceptions. */
public static IllegalArgumentException createHtmlFileRequiredException() {
return createIllegalArgumentException(
"error.fileFormatRequired", "File must be in {0} format", "HTML or ZIP");
}
public static IllegalArgumentException createPdfFileRequiredException() {
return createIllegalArgumentException(
"error.fileFormatRequired", "File must be in {0} format", "PDF");
}
public static IllegalArgumentException createInvalidPageSizeException(String size) {
return createIllegalArgumentException(
"error.invalidFormat", "Invalid {0} format: {1}", "page size", size);
}
/** Create OCR-related exceptions. */
public static IOException createOcrLanguageRequiredException() {
return createIOException(
"error.optionsNotSpecified", "{0} options are not specified", null, "OCR language");
}
public static IOException createOcrInvalidLanguagesException() {
return createIOException(
"error.invalidFormat",
"Invalid {0} format: {1}",
null,
"OCR languages",
"none of the selected languages are valid");
}
public static IOException createOcrToolsUnavailableException() {
return createIOException(
"error.toolNotInstalled", "{0} is not installed", null, "OCR tools");
}
/** Create system requirement exceptions. */
public static IOException createPythonRequiredForWebpException() {
return createIOException(
"error.toolRequired", "{0} is required for {1}", null, "Python", "WebP conversion");
}
/** Create file operation exceptions. */
public static IOException createFileNotFoundException(String fileId) {
return createIOException("error.fileNotFound", "File not found with ID: {0}", null, fileId);
}
public static RuntimeException createPdfaConversionFailedException() {
return createRuntimeException(
"error.conversionFailed", "{0} conversion failed", null, "PDF/A");
}
public static IllegalArgumentException createInvalidComparatorException() {
return createIllegalArgumentException(
"error.invalidFormat",
"Invalid {0} format: {1}",
"comparator",
"only 'greater', 'equal', and 'less' are supported");
}
/** Create compression-related exceptions. */
public static RuntimeException createMd5AlgorithmException(Exception cause) {
return createRuntimeException(
"error.algorithmNotAvailable", "{0} algorithm not available", cause, "MD5");
}
public static IllegalArgumentException createCompressionOptionsException() {
return createIllegalArgumentException(
"error.optionsNotSpecified",
"{0} options are not specified",
"compression (expected output size and optimize level)");
}
public static IOException createGhostscriptCompressionException() {
return createIOException(
"error.commandFailed", "{0} command failed", null, "Ghostscript compression");
}
public static IOException createGhostscriptCompressionException(Exception cause) {
return createIOException(
"error.commandFailed", "{0} command failed", cause, "Ghostscript compression");
}
public static IOException createQpdfCompressionException(Exception cause) {
return createIOException("error.commandFailed", "{0} command failed", cause, "QPDF");
}
/**
* Check if an exception indicates a corrupted PDF and wrap it with appropriate message.
*
* @param e the exception to check
* @return the original exception if not PDF corruption, or a new IOException with user-friendly
* message
*/
public static IOException handlePdfException(IOException e) {
return handlePdfException(e, null);
}
/**
* Check if an exception indicates a corrupted PDF and wrap it with appropriate message.
*
* @param e the exception to check
* @param context additional context for the error
* @return the original exception if not PDF corruption, or a new IOException with user-friendly
* message
*/
public static IOException handlePdfException(IOException e, String context) {
if (PdfErrorUtils.isCorruptedPdfError(e)) {
return createPdfCorruptedException(context, e);
}
if (isEncryptionError(e)) {
return createPdfEncryptionException(e);
}
if (isPasswordError(e)) {
return createPdfPasswordException(e);
}
return e; // Return original exception if no specific handling needed
}
/**
* Check if an exception indicates a PDF encryption/decryption error.
*
* @param e the exception to check
* @return true if it's an encryption error, false otherwise
*/
public static boolean isEncryptionError(IOException e) {
String message = e.getMessage();
if (message == null) return false;
return message.contains("BadPaddingException")
|| message.contains("Given final block not properly padded")
|| message.contains("AES initialization vector not fully read")
|| message.contains("Failed to decrypt");
}
/**
* Check if an exception indicates a PDF password error.
*
* @param e the exception to check
* @return true if it's a password error, false otherwise
*/
public static boolean isPasswordError(IOException e) {
String message = e.getMessage();
if (message == null) return false;
return message.contains("password is incorrect")
|| message.contains("Password is not provided")
|| message.contains("PDF contains an encryption dictionary");
}
/**
* Log an exception with appropriate level based on its type.
*
* @param operation the operation being performed
* @param e the exception that occurred
*/
public static void logException(String operation, Exception e) {
if (PdfErrorUtils.isCorruptedPdfError(e)) {
log.warn("PDF corruption detected during {}: {}", operation, e.getMessage());
} else if (e instanceof IOException
&& (isEncryptionError((IOException) e) || isPasswordError((IOException) e))) {
log.info("PDF security issue during {}: {}", operation, e.getMessage());
} else {
log.error("Unexpected error during {}", operation, e);
}
}
/** Create common validation exceptions. */
public static IllegalArgumentException createInvalidArgumentException(String argumentName) {
return createIllegalArgumentException(
"error.invalidArgument", "Invalid argument: {0}", argumentName);
}
public static IllegalArgumentException createInvalidArgumentException(
String argumentName, String value) {
return createIllegalArgumentException(
"error.invalidFormat", "Invalid {0} format: {1}", argumentName, value);
}
public static IllegalArgumentException createNullArgumentException(String argumentName) {
return createIllegalArgumentException(
"error.argumentRequired", "{0} must not be null", argumentName);
}
}
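
// Usage sketch (illustrative): wrapping a PDFBox load so corruption, encryption, and password
// problems surface with the user-friendly messages defined above. Loader.loadPDF is the PDFBox
// API already used by this repository's tests; the "during load" context string is arbitrary.
import java.io.File;
import java.io.IOException;

import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;

class PdfLoadSketch {
    static PDDocument loadOrExplain(File pdf) throws IOException {
        try {
            return Loader.loadPDF(pdf);
        } catch (IOException e) {
            ExceptionUtils.logException("load", e);
            throw ExceptionUtils.handlePdfException(e, "during load");
        }
    }
}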

View File

@ -1,31 +0,0 @@
package stirling.software.common.util;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ExecutorFactory {
/**
* Creates an ExecutorService using virtual threads if available (Java 21+), or falls back to a
* cached thread pool on older Java versions.
*/
public static ExecutorService newVirtualOrCachedThreadExecutor() {
try {
ExecutorService executor =
(ExecutorService)
Executors.class
.getMethod("newVirtualThreadPerTaskExecutor")
.invoke(null);
return executor;
} catch (NoSuchMethodException e) {
log.debug("Virtual threads not available; falling back to cached thread pool.");
} catch (Exception e) {
log.debug("Error initializing virtual thread executor: {}", e.getMessage(), e);
}
return Executors.newCachedThreadPool();
}
}
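
// Usage sketch (illustrative): submitting a short task to the factory's executor and shutting it
// down afterwards. On Java 21+ the task runs on a virtual thread, otherwise on a cached pool.
import java.util.concurrent.ExecutorService;

class ExecutorFactorySketch {
    public static void main(String[] args) {
        ExecutorService executor = ExecutorFactory.newVirtualOrCachedThreadExecutor();
        try {
            executor.submit(() -> System.out.println("running on " + Thread.currentThread()));
        } finally {
            executor.shutdown();
        }
    }
}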

View File

@ -1,55 +0,0 @@
package stirling.software.common.util;
import java.io.IOException;
/** Utility class for detecting and handling PDF-related errors. */
public class PdfErrorUtils {
/**
* Checks if an IOException indicates a corrupted PDF file.
*
* @param e the IOException to check
* @return true if the error indicates PDF corruption, false otherwise
*/
public static boolean isCorruptedPdfError(IOException e) {
return isCorruptedPdfError(e.getMessage());
}
/**
* Checks if any Exception indicates a corrupted PDF file.
*
* @param e the Exception to check
* @return true if the error indicates PDF corruption, false otherwise
*/
public static boolean isCorruptedPdfError(Exception e) {
return isCorruptedPdfError(e.getMessage());
}
/**
* Checks if an error message indicates a corrupted PDF file.
*
* @param message the error message to check
* @return true if the message indicates PDF corruption, false otherwise
*/
private static boolean isCorruptedPdfError(String message) {
if (message == null) return false;
// Check for common corruption indicators
return message.contains("Missing root object specification")
|| message.contains("Header doesn't contain versioninfo")
|| message.contains("Expected trailer")
|| message.contains("Invalid PDF")
|| message.contains("Corrupted")
|| message.contains("damaged")
|| message.contains("Unknown dir object")
|| message.contains("Can't dereference COSObject")
|| message.contains("parseCOSString string should start with")
|| message.contains("ICCBased colorspace array must have a stream")
|| message.contains("1-based index not found")
|| message.contains("Invalid dictionary, found:")
|| message.contains("AES initialization vector not fully read")
|| message.contains("BadPaddingException")
|| message.contains("Given final block not properly padded")
|| message.contains("End-of-File, expected line");
}
}
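
// Usage sketch (illustrative): deciding whether to point the user at the 'Repair PDF' tool based
// on the corruption heuristics above. The advice strings are hypothetical.
import java.io.IOException;

class PdfErrorAdviceSketch {
    static String adviseUser(IOException e) {
        return PdfErrorUtils.isCorruptedPdfError(e)
                ? "The PDF appears corrupted; try the 'Repair PDF' feature first."
                : "An unexpected error occurred: " + e.getMessage();
    }
}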

View File

@ -1,82 +0,0 @@
package stirling.software.common.util;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
import lombok.extern.slf4j.Slf4j;
/**
* Utility class to access Spring managed beans from non-Spring managed classes. This is especially
* useful for classes that are instantiated by frameworks or created dynamically.
*/
@Component
@Slf4j
public class SpringContextHolder implements ApplicationContextAware {
private static ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
SpringContextHolder.applicationContext = applicationContext;
log.debug("Spring context holder initialized");
}
/**
* Get a Spring bean by class type
*
* @param <T> The bean type
* @param beanClass The bean class
* @return The bean instance, or null if not found
*/
public static <T> T getBean(Class<T> beanClass) {
if (applicationContext == null) {
log.warn(
"Application context not initialized when attempting to get bean of type {}",
beanClass.getName());
return null;
}
try {
return applicationContext.getBean(beanClass);
} catch (BeansException e) {
log.error("Error getting bean of type {}: {}", beanClass.getName(), e.getMessage());
return null;
}
}
/**
* Get a Spring bean by name
*
* @param <T> The bean type
* @param beanName The bean name
* @return The bean instance, or null if not found
*/
public static <T> T getBean(String beanName) {
if (applicationContext == null) {
log.warn(
"Application context not initialized when attempting to get bean '{}'",
beanName);
return null;
}
try {
@SuppressWarnings("unchecked")
T bean = (T) applicationContext.getBean(beanName);
return bean;
} catch (BeansException e) {
log.error("Error getting bean '{}': {}", beanName, e.getMessage());
return null;
}
}
/**
* Check if the application context is initialized
*
* @return true if initialized, false otherwise
*/
public static boolean isInitialized() {
return applicationContext != null;
}
}
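
// Usage sketch (illustrative): guarding a by-name lookup with isInitialized() so code that runs
// before the Spring context is ready degrades gracefully. The bean name used here is hypothetical.
class NamedBeanLookupSketch {
    static TempFileRegistry lookupRegistry() {
        if (!SpringContextHolder.isInitialized()) {
            return null;
        }
        return SpringContextHolder.getBean("tempFileRegistry");
    }
}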

View File

@ -1,44 +0,0 @@
package stirling.software.common.util;
import java.io.IOException;
import java.nio.file.Path;
import lombok.extern.slf4j.Slf4j;
/**
* A wrapper class for a temporary directory that implements AutoCloseable. Can be used with
* try-with-resources for automatic cleanup.
*/
@Slf4j
public class TempDirectory implements AutoCloseable {
private final TempFileManager manager;
private final Path directory;
public TempDirectory(TempFileManager manager) throws IOException {
this.manager = manager;
this.directory = manager.createTempDirectory();
}
public Path getPath() {
return directory;
}
public String getAbsolutePath() {
return directory.toAbsolutePath().toString();
}
public boolean exists() {
return java.nio.file.Files.exists(directory);
}
@Override
public void close() {
manager.deleteTempDirectory(directory);
}
@Override
public String toString() {
return "TempDirectory{" + directory.toAbsolutePath() + "}";
}
}
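
// Usage sketch (illustrative): try-with-resources guarantees the scratch directory is deleted
// even when the work inside throws. The file name written here is arbitrary.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class TempDirectorySketch {
    static void workInScratchSpace(TempFileManager manager) throws IOException {
        try (TempDirectory scratch = new TempDirectory(manager)) {
            Path marker = scratch.getPath().resolve("work.txt");
            Files.writeString(marker, "intermediate output");
        } // directory and its contents are removed here
    }
}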

View File

@ -1,49 +0,0 @@
package stirling.software.common.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import lombok.extern.slf4j.Slf4j;
/**
* A wrapper class for a temporary file that implements AutoCloseable. Can be used with
* try-with-resources for automatic cleanup.
*/
@Slf4j
public class TempFile implements AutoCloseable {
private final TempFileManager manager;
private final File file;
public TempFile(TempFileManager manager, String suffix) throws IOException {
this.manager = manager;
this.file = manager.createTempFile(suffix);
}
public File getFile() {
return file;
}
public Path getPath() {
return file.toPath();
}
public String getAbsolutePath() {
return file.getAbsolutePath();
}
public boolean exists() {
return file.exists();
}
@Override
public void close() {
manager.deleteTempFile(file);
}
@Override
public String toString() {
return "TempFile{" + file.getAbsolutePath() + "}";
}
}
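
// Usage sketch (illustrative): the temp file is deleted and unregistered automatically when the
// try-with-resources block ends, whether or not the body throws.
import java.io.IOException;
import java.nio.file.Files;

class TempFileSketch {
    static long writeAndMeasure(TempFileManager manager, byte[] data) throws IOException {
        try (TempFile tmp = new TempFile(manager, ".pdf")) {
            Files.write(tmp.getPath(), data);
            return Files.size(tmp.getPath());
        } // file deleted here
    }
}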

View File

@ -1,249 +0,0 @@
package stirling.software.common.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Set;
import java.util.UUID;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import stirling.software.common.model.ApplicationProperties;
/**
* Service for managing temporary files in Stirling-PDF. Provides methods for creating, tracking,
* and cleaning up temporary files.
*/
@Slf4j
@Service
@RequiredArgsConstructor
public class TempFileManager {
private final TempFileRegistry registry;
private final ApplicationProperties applicationProperties;
/**
* Create a temporary file with the Stirling-PDF prefix. The file is automatically registered
* with the registry.
*
* @param suffix The suffix for the temporary file
* @return The created temporary file
* @throws IOException If an I/O error occurs
*/
public File createTempFile(String suffix) throws IOException {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
Path tempFilePath;
String customTempDirectory = tempFiles.getBaseTmpDir();
if (customTempDirectory != null && !customTempDirectory.isEmpty()) {
Path tempDir = Path.of(customTempDirectory);
if (!Files.exists(tempDir)) {
Files.createDirectories(tempDir);
}
tempFilePath = Files.createTempFile(tempDir, tempFiles.getPrefix(), suffix);
} else {
tempFilePath = Files.createTempFile(tempFiles.getPrefix(), suffix);
}
File tempFile = tempFilePath.toFile();
return registry.register(tempFile);
}
/**
* Create a temporary directory with the Stirling-PDF prefix. The directory is automatically
* registered with the registry.
*
* @return The created temporary directory
* @throws IOException If an I/O error occurs
*/
public Path createTempDirectory() throws IOException {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
Path tempDirPath;
String customTempDirectory = tempFiles.getBaseTmpDir();
if (customTempDirectory != null && !customTempDirectory.isEmpty()) {
Path tempDir = Path.of(customTempDirectory);
if (!Files.exists(tempDir)) {
Files.createDirectories(tempDir);
}
tempDirPath = Files.createTempDirectory(tempDir, tempFiles.getPrefix());
} else {
tempDirPath = Files.createTempDirectory(tempFiles.getPrefix());
}
return registry.registerDirectory(tempDirPath);
}
/**
* Convert a MultipartFile to a temporary File and register it. This is a wrapper around
* GeneralUtils.convertMultipartFileToFile that ensures the created temp file is registered.
*
* @param multipartFile The MultipartFile to convert
* @return The created temporary file
* @throws IOException If an I/O error occurs
*/
public File convertMultipartFileToFile(MultipartFile multipartFile) throws IOException {
File tempFile = GeneralUtils.convertMultipartFileToFile(multipartFile);
return registry.register(tempFile);
}
/**
* Delete a temporary file and unregister it from the registry.
*
* @param file The file to delete
* @return true if the file was deleted successfully, false otherwise
*/
public boolean deleteTempFile(File file) {
if (file != null && file.exists()) {
boolean deleted = file.delete();
if (deleted) {
registry.unregister(file);
log.debug("Deleted temp file: {}", file.getAbsolutePath());
} else {
log.warn("Failed to delete temp file: {}", file.getAbsolutePath());
}
return deleted;
}
return false;
}
/**
* Delete a temporary file and unregister it from the registry.
*
* @param path The path to delete
* @return true if the file was deleted successfully, false otherwise
*/
public boolean deleteTempFile(Path path) {
if (path != null) {
try {
boolean deleted = Files.deleteIfExists(path);
if (deleted) {
registry.unregister(path);
log.debug("Deleted temp file: {}", path.toString());
} else {
log.debug("Temp file already deleted or does not exist: {}", path.toString());
}
return deleted;
} catch (IOException e) {
log.warn("Failed to delete temp file: {}", path.toString(), e);
return false;
}
}
return false;
}
/**
* Delete a temporary directory and all its contents.
*
* @param directory The directory to delete
*/
public void deleteTempDirectory(Path directory) {
if (directory != null && Files.isDirectory(directory)) {
try {
GeneralUtils.deleteDirectory(directory);
log.debug("Deleted temp directory: {}", directory.toString());
} catch (IOException e) {
log.warn("Failed to delete temp directory: {}", directory.toString(), e);
}
}
}
/**
* Register an existing file with the registry.
*
* @param file The file to register
* @return The same file for method chaining
*/
public File register(File file) {
if (file != null && file.exists()) {
return registry.register(file);
}
return file;
}
/**
* Clean up old temporary files based on age.
*
* @param maxAgeMillis Maximum age in milliseconds for temp files
* @return Number of files deleted
*/
public int cleanupOldTempFiles(long maxAgeMillis) {
int deletedCount = 0;
// Get files older than max age
Set<Path> oldFiles = registry.getFilesOlderThan(maxAgeMillis);
// Delete each old file
for (Path file : oldFiles) {
if (deleteTempFile(file)) {
deletedCount++;
}
}
log.info("Cleaned up {} old temporary files", deletedCount);
return deletedCount;
}
/**
* Get the maximum age for temporary files in milliseconds.
*
* @return Maximum age in milliseconds
*/
public long getMaxAgeMillis() {
long maxAgeHours =
applicationProperties.getSystem().getTempFileManagement().getMaxAgeHours();
return Duration.ofHours(maxAgeHours).toMillis();
}
/**
* Generate a unique temporary file name with the Stirling-PDF prefix.
*
* @param type Type identifier for the temp file
* @param extension File extension (without the dot)
* @return A unique temporary file name
*/
public String generateTempFileName(String type, String extension) {
String tempFilePrefix =
applicationProperties.getSystem().getTempFileManagement().getPrefix();
String uuid = UUID.randomUUID().toString().substring(0, 8);
return tempFilePrefix + type + "-" + uuid + "." + extension;
}
/**
* Register a known LibreOffice temporary directory. This is used when integrating with
* LibreOffice for file conversions.
*
* @return The LibreOffice temp directory
* @throws IOException If directory creation fails
*/
public Path registerLibreOfficeTempDir() throws IOException {
ApplicationProperties.TempFileManagement tempFiles =
applicationProperties.getSystem().getTempFileManagement();
Path loTempDir;
String libreOfficeTempDir = tempFiles.getLibreofficeDir();
String customTempDirectory = tempFiles.getBaseTmpDir();
// First check if explicitly configured
if (libreOfficeTempDir != null && !libreOfficeTempDir.isEmpty()) {
loTempDir = Path.of(libreOfficeTempDir);
}
// Next check if we have a custom temp directory
else if (customTempDirectory != null && !customTempDirectory.isEmpty()) {
loTempDir = Path.of(customTempDirectory, "libreoffice");
}
// Fall back to system temp dir with our application prefix
else {
loTempDir = Path.of(System.getProperty("java.io.tmpdir"), "stirling-pdf-libreoffice");
}
if (!Files.exists(loTempDir)) {
Files.createDirectories(loTempDir);
}
return registry.registerDirectory(loTempDir);
}
}
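
// Usage sketch (illustrative): a manual sweep that deletes every registered file older than the
// configured maximum age, mirroring what the scheduled cleanup does.
class TempFileSweepSketch {
    static int sweep(TempFileManager manager) {
        return manager.cleanupOldTempFiles(manager.getMaxAgeMillis());
    }
}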

View File

@ -1,174 +0,0 @@
package stirling.software.common.util;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.stream.Collectors;
import org.springframework.stereotype.Component;
import lombok.extern.slf4j.Slf4j;
/**
* Central registry for tracking temporary files created by Stirling-PDF. Maintains a thread-safe
* collection of paths with their creation timestamps.
*/
@Slf4j
@Component
public class TempFileRegistry {
private final ConcurrentMap<Path, Instant> registeredFiles = new ConcurrentHashMap<>();
private final Set<Path> thirdPartyTempFiles =
Collections.newSetFromMap(new ConcurrentHashMap<>());
private final Set<Path> tempDirectories = Collections.newSetFromMap(new ConcurrentHashMap<>());
/**
* Register a temporary file with the registry.
*
* @param file The temporary file to track
* @return The same file for method chaining
*/
public File register(File file) {
if (file != null) {
registeredFiles.put(file.toPath(), Instant.now());
log.debug("Registered temp file: {}", file.getAbsolutePath());
}
return file;
}
/**
* Register a temporary path with the registry.
*
* @param path The temporary path to track
* @return The same path for method chaining
*/
public Path register(Path path) {
if (path != null) {
registeredFiles.put(path, Instant.now());
log.debug("Registered temp path: {}", path.toString());
}
return path;
}
/**
* Register a temporary directory to be cleaned up.
*
* @param directory Directory to register
* @return The same directory for method chaining
*/
public Path registerDirectory(Path directory) {
if (directory != null && Files.isDirectory(directory)) {
tempDirectories.add(directory);
log.debug("Registered temp directory: {}", directory.toString());
}
return directory;
}
/**
* Register a third-party temporary file that requires special handling.
*
* @param file The third-party temp file
* @return The same file for method chaining
*/
public File registerThirdParty(File file) {
if (file != null) {
thirdPartyTempFiles.add(file.toPath());
log.debug("Registered third-party temp file: {}", file.getAbsolutePath());
}
return file;
}
/**
* Unregister a file from the registry.
*
* @param file The file to unregister
*/
public void unregister(File file) {
if (file != null) {
registeredFiles.remove(file.toPath());
thirdPartyTempFiles.remove(file.toPath());
log.debug("Unregistered temp file: {}", file.getAbsolutePath());
}
}
/**
* Unregister a path from the registry.
*
* @param path The path to unregister
*/
public void unregister(Path path) {
if (path != null) {
registeredFiles.remove(path);
thirdPartyTempFiles.remove(path);
log.debug("Unregistered temp path: {}", path.toString());
}
}
/**
* Get all registered temporary files.
*
* @return Set of registered file paths
*/
public Set<Path> getAllRegisteredFiles() {
return registeredFiles.keySet();
}
/**
* Get temporary files older than the specified duration in milliseconds.
*
* @param maxAgeMillis Maximum age in milliseconds
* @return Set of paths older than the specified age
*/
public Set<Path> getFilesOlderThan(long maxAgeMillis) {
Instant cutoffTime = Instant.now().minusMillis(maxAgeMillis);
return registeredFiles.entrySet().stream()
.filter(entry -> entry.getValue().isBefore(cutoffTime))
.map(Map.Entry::getKey)
.collect(Collectors.toSet());
}
/**
* Get all registered third-party temporary files.
*
* @return Set of third-party file paths
*/
public Set<Path> getThirdPartyTempFiles() {
return thirdPartyTempFiles;
}
/**
* Get all registered temporary directories.
*
* @return Set of temporary directory paths
*/
public Set<Path> getTempDirectories() {
return tempDirectories;
}
/**
* Check if a file is registered in the registry.
*
* @param file The file to check
* @return True if the file is registered, false otherwise
*/
public boolean contains(File file) {
if (file == null) {
return false;
}
Path path = file.toPath();
return registeredFiles.containsKey(path) || thirdPartyTempFiles.contains(path);
}
/** Clear all registry data. */
public void clear() {
registeredFiles.clear();
thirdPartyTempFiles.clear();
tempDirectories.clear();
}
}
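
// Usage sketch (illustrative): registering a freshly created file and later asking which tracked
// entries have aged past one hour. The one-hour threshold is arbitrary.
import java.io.File;
import java.nio.file.Path;
import java.util.Set;

class TempFileRegistrySketch {
    static Set<Path> trackAndQuery(TempFileRegistry registry, File tempFile) {
        registry.register(tempFile);
        return registry.getFilesOlderThan(60L * 60 * 1000);
    }
}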

View File

@ -1,135 +0,0 @@
package stirling.software.common.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import lombok.extern.slf4j.Slf4j;
/**
* Utility class for handling temporary files with proper cleanup. Provides helper methods and
* wrappers to ensure temp files are properly cleaned up.
*/
@Slf4j
public class TempFileUtil {
/**
* A collection of temporary files that implements AutoCloseable. All files in the collection
* are cleaned up when close() is called.
*/
public static class TempFileCollection implements AutoCloseable {
private final TempFileManager manager;
private final List<File> tempFiles = new ArrayList<>();
public TempFileCollection(TempFileManager manager) {
this.manager = manager;
}
public File addTempFile(String suffix) throws IOException {
File file = manager.createTempFile(suffix);
tempFiles.add(file);
return file;
}
public List<File> getFiles() {
return new ArrayList<>(tempFiles);
}
@Override
public void close() {
for (File file : tempFiles) {
manager.deleteTempFile(file);
}
}
}
/**
* Execute a function with a temporary file, ensuring cleanup in a finally block.
*
* @param <R> The return type of the function
* @param tempFileManager The temp file manager
* @param suffix File suffix (e.g., ".pdf")
* @param function The function to execute with the temp file
* @return The result of the function
* @throws IOException If an I/O error occurs
*/
public static <R> R withTempFile(
TempFileManager tempFileManager, String suffix, Function<File, R> function)
throws IOException {
File tempFile = tempFileManager.createTempFile(suffix);
try {
return function.apply(tempFile);
} finally {
tempFileManager.deleteTempFile(tempFile);
}
}
/**
* Execute a function with multiple temporary files, ensuring cleanup in a finally block.
*
* @param <R> The return type of the function
* @param tempFileManager The temp file manager
* @param count Number of temp files to create
* @param suffix File suffix (e.g., ".pdf")
* @param function The function to execute with the temp files
* @return The result of the function
* @throws IOException If an I/O error occurs
*/
public static <R> R withMultipleTempFiles(
TempFileManager tempFileManager,
int count,
String suffix,
Function<List<File>, R> function)
throws IOException {
List<File> tempFiles = new ArrayList<>(count);
try {
for (int i = 0; i < count; i++) {
tempFiles.add(tempFileManager.createTempFile(suffix));
}
return function.apply(tempFiles);
} finally {
for (File file : tempFiles) {
tempFileManager.deleteTempFile(file);
}
}
}
/**
* Safely delete a list of temporary files, logging any errors.
*
* @param files The list of files to delete
*/
public static void safeDeleteFiles(List<Path> files) {
if (files == null) return;
for (Path file : files) {
if (file == null) continue;
try {
Files.deleteIfExists(file);
log.debug("Deleted temp file: {}", file);
} catch (IOException e) {
log.warn("Failed to delete temp file: {}", file, e);
}
}
}
/**
* Register an already created temp file with the registry. Use this for files created outside
* of TempFileManager.
*
* @param tempFileManager The temp file manager
* @param file The file to register
* @return The registered file
*/
public static File registerExistingTempFile(TempFileManager tempFileManager, File file) {
if (tempFileManager != null && file != null && file.exists()) {
return tempFileManager.register(file);
}
return file;
}
}
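
// Usage sketch (illustrative): withTempFile hands the lambda a managed temp file and cleans it up
// in a finally block even if the lambda throws.
import java.io.File;
import java.io.IOException;

class TempFileUtilSketch {
    static long sizeOfScratchFile(TempFileManager manager) throws IOException {
        return TempFileUtil.withTempFile(manager, ".tmp", (File f) -> f.length());
    }
}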

View File

@ -1,14 +0,0 @@
package stirling.software.common.util;
import java.util.Collection;
public class ValidationUtil {
public static boolean isStringEmpty(String input) {
return input == null || input.isBlank();
}
public static boolean isCollectionEmpty(Collection<String> input) {
return input == null || input.isEmpty();
}
}

View File

@ -1,14 +0,0 @@
package stirling.software.common.util;
import java.util.Collection;
public class ValidationUtils {
public static boolean isStringEmpty(String input) {
return input == null || input.isBlank();
}
public static boolean isCollectionEmpty(Collection<String> input) {
return input == null || input.isEmpty();
}
}

View File

@ -1,208 +0,0 @@
package stirling.software.common.annotations;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.function.Supplier;
import org.aspectj.lang.ProceedingJoinPoint;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.ResponseEntity;
import org.springframework.web.multipart.MultipartFile;
import jakarta.servlet.http.HttpServletRequest;
import stirling.software.common.aop.AutoJobAspect;
import stirling.software.common.model.api.PDFFile;
import stirling.software.common.service.FileOrUploadService;
import stirling.software.common.service.FileStorage;
import stirling.software.common.service.JobExecutorService;
import stirling.software.common.service.JobQueue;
import stirling.software.common.service.ResourceMonitor;
@ExtendWith(MockitoExtension.class)
class AutoJobPostMappingIntegrationTest {
private AutoJobAspect autoJobAspect;
@Mock
private JobExecutorService jobExecutorService;
@Mock
private HttpServletRequest request;
@Mock
private FileOrUploadService fileOrUploadService;
@Mock
private FileStorage fileStorage;
@Mock
private ResourceMonitor resourceMonitor;
@Mock
private JobQueue jobQueue;
@BeforeEach
void setUp() {
autoJobAspect = new AutoJobAspect(
jobExecutorService,
request,
fileOrUploadService,
fileStorage
);
}
@Mock
private ProceedingJoinPoint joinPoint;
@Mock
private AutoJobPostMapping autoJobPostMapping;
@Captor
private ArgumentCaptor<Supplier<Object>> workCaptor;
@Captor
private ArgumentCaptor<Boolean> asyncCaptor;
@Captor
private ArgumentCaptor<Long> timeoutCaptor;
@Captor
private ArgumentCaptor<Boolean> queueableCaptor;
@Captor
private ArgumentCaptor<Integer> resourceWeightCaptor;
@Test
void shouldExecuteWithCustomParameters() throws Throwable {
// Given
PDFFile pdfFile = new PDFFile();
pdfFile.setFileId("test-file-id");
Object[] args = new Object[] { pdfFile };
when(joinPoint.getArgs()).thenReturn(args);
when(request.getParameter("async")).thenReturn("true");
when(autoJobPostMapping.timeout()).thenReturn(60000L);
when(autoJobPostMapping.retryCount()).thenReturn(3);
when(autoJobPostMapping.trackProgress()).thenReturn(true);
when(autoJobPostMapping.queueable()).thenReturn(true);
when(autoJobPostMapping.resourceWeight()).thenReturn(75);
MultipartFile mockFile = mock(MultipartFile.class);
when(fileStorage.retrieveFile("test-file-id")).thenReturn(mockFile);
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
.thenReturn(ResponseEntity.ok("success"));
// When
Object result = autoJobAspect.wrapWithJobExecution(joinPoint, autoJobPostMapping);
// Then
assertEquals(ResponseEntity.ok("success"), result);
verify(jobExecutorService).runJobGeneric(
asyncCaptor.capture(),
workCaptor.capture(),
timeoutCaptor.capture(),
queueableCaptor.capture(),
resourceWeightCaptor.capture());
assertTrue(asyncCaptor.getValue(), "Async should be true");
assertEquals(60000L, timeoutCaptor.getValue(), "Timeout should be 60000ms");
assertTrue(queueableCaptor.getValue(), "Queueable should be true");
assertEquals(75, resourceWeightCaptor.getValue(), "Resource weight should be 75");
// Test that file was resolved
assertNotNull(pdfFile.getFileInput(), "File input should be set");
}
@Test
void shouldRetryOnError() throws Throwable {
// Given
when(joinPoint.getArgs()).thenReturn(new Object[0]);
when(request.getParameter("async")).thenReturn("false");
when(autoJobPostMapping.timeout()).thenReturn(-1L);
when(autoJobPostMapping.retryCount()).thenReturn(2);
when(autoJobPostMapping.trackProgress()).thenReturn(false);
when(autoJobPostMapping.queueable()).thenReturn(false);
when(autoJobPostMapping.resourceWeight()).thenReturn(50);
// First call throws exception, second succeeds
when(joinPoint.proceed(any()))
.thenThrow(new RuntimeException("First attempt failed"))
.thenReturn(ResponseEntity.ok("retry succeeded"));
// Mock jobExecutorService to execute the work immediately
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
.thenAnswer(invocation -> {
Supplier<Object> work = invocation.getArgument(1);
return work.get();
});
// When
Object result = autoJobAspect.wrapWithJobExecution(joinPoint, autoJobPostMapping);
// Then
assertEquals(ResponseEntity.ok("retry succeeded"), result);
// Verify that proceed was called twice (initial attempt + 1 retry)
verify(joinPoint, times(2)).proceed(any());
}
@Test
void shouldHandlePDFFileWithAsyncRequests() throws Throwable {
// Given
PDFFile pdfFile = new PDFFile();
pdfFile.setFileInput(mock(MultipartFile.class));
Object[] args = new Object[] { pdfFile };
when(joinPoint.getArgs()).thenReturn(args);
when(request.getParameter("async")).thenReturn("true");
when(autoJobPostMapping.retryCount()).thenReturn(1);
when(fileStorage.storeFile(any(MultipartFile.class))).thenReturn("stored-file-id");
when(fileStorage.retrieveFile("stored-file-id")).thenReturn(mock(MultipartFile.class));
// Mock job executor to return a successful response
when(jobExecutorService.runJobGeneric(
anyBoolean(), any(Supplier.class), anyLong(), anyBoolean(), anyInt()))
.thenReturn(ResponseEntity.ok("success"));
// When
autoJobAspect.wrapWithJobExecution(joinPoint, autoJobPostMapping);
// Then
assertEquals("stored-file-id", pdfFile.getFileId(),
"FileId should be set to the stored file id");
assertNotNull(pdfFile.getFileInput(), "FileInput should be replaced with persistent file");
// Verify storage operations
verify(fileStorage).storeFile(any(MultipartFile.class));
verify(fileStorage).retrieveFile("stored-file-id");
}
}

View File

@ -1,223 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import static stirling.software.common.service.SpyPDFDocumentFactory.*;
import java.io.*;
import java.nio.file.*;
import java.nio.file.Files;
import java.util.Arrays;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.*;
import org.apache.pdfbox.pdmodel.common.PDStream;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.springframework.mock.web.MockMultipartFile;
import stirling.software.common.model.api.PDFFile;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Execution(value = ExecutionMode.SAME_THREAD)
class CustomPDFDocumentFactoryTest {
private SpyPDFDocumentFactory factory;
private byte[] basePdfBytes;
@BeforeEach
void setup() throws IOException {
PdfMetadataService mockService = mock(PdfMetadataService.class);
factory = new SpyPDFDocumentFactory(mockService);
try (InputStream is = getClass().getResourceAsStream("/example.pdf")) {
assertNotNull(is, "example.pdf must be present in src/test/resources");
basePdfBytes = is.readAllBytes();
}
}
@ParameterizedTest
@CsvSource({"5,MEMORY_ONLY", "20,MIXED", "60,TEMP_FILE"})
void testStrategy_FileInput(int sizeMB, StrategyType expected) throws IOException {
File file = writeTempFile(inflatePdf(basePdfBytes, sizeMB));
try (PDDocument doc = factory.load(file)) {
Assertions.assertEquals(expected, factory.lastStrategyUsed);
}
}
@ParameterizedTest
@CsvSource({"5,MEMORY_ONLY", "20,MIXED", "60,TEMP_FILE"})
void testStrategy_ByteArray(int sizeMB, StrategyType expected) throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, sizeMB);
try (PDDocument doc = factory.load(inflated)) {
Assertions.assertEquals(expected, factory.lastStrategyUsed);
}
}
@ParameterizedTest
@CsvSource({"5,MEMORY_ONLY", "20,MIXED", "60,TEMP_FILE"})
void testStrategy_InputStream(int sizeMB, StrategyType expected) throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, sizeMB);
try (PDDocument doc = factory.load(new ByteArrayInputStream(inflated))) {
Assertions.assertEquals(expected, factory.lastStrategyUsed);
}
}
@ParameterizedTest
@CsvSource({"5,MEMORY_ONLY", "20,MIXED", "60,TEMP_FILE"})
void testStrategy_MultipartFile(int sizeMB, StrategyType expected) throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, sizeMB);
MockMultipartFile multipart =
new MockMultipartFile("file", "doc.pdf", "application/pdf", inflated);
try (PDDocument doc = factory.load(multipart)) {
Assertions.assertEquals(expected, factory.lastStrategyUsed);
}
}
@ParameterizedTest
@CsvSource({"5,MEMORY_ONLY", "20,MIXED", "60,TEMP_FILE"})
void testStrategy_PDFFile(int sizeMB, StrategyType expected) throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, sizeMB);
MockMultipartFile multipart =
new MockMultipartFile("file", "doc.pdf", "application/pdf", inflated);
PDFFile pdfFile = new PDFFile();
pdfFile.setFileInput(multipart);
try (PDDocument doc = factory.load(pdfFile)) {
Assertions.assertEquals(expected, factory.lastStrategyUsed);
}
}
private byte[] inflatePdf(byte[] input, int sizeInMB) throws IOException {
try (PDDocument doc = Loader.loadPDF(input)) {
byte[] largeData = new byte[sizeInMB * 1024 * 1024];
Arrays.fill(largeData, (byte) 'A');
PDStream stream = new PDStream(doc, new ByteArrayInputStream(largeData));
stream.getCOSObject().setItem(COSName.TYPE, COSName.XOBJECT);
stream.getCOSObject().setItem(COSName.SUBTYPE, COSName.IMAGE);
doc.getDocumentCatalog()
.getCOSObject()
.setItem(COSName.getPDFName("DummyBigStream"), stream.getCOSObject());
ByteArrayOutputStream out = new ByteArrayOutputStream();
doc.save(out);
return out.toByteArray();
}
}
@Test
void testLoadFromPath() throws IOException {
File file = writeTempFile(inflatePdf(basePdfBytes, 5));
Path path = file.toPath();
try (PDDocument doc = factory.load(path)) {
assertNotNull(doc);
}
}
@Test
void testLoadFromStringPath() throws IOException {
File file = writeTempFile(inflatePdf(basePdfBytes, 5));
try (PDDocument doc = factory.load(file.getAbsolutePath())) {
assertNotNull(doc);
}
}
// TODO: need to add a password-protected PDF test resource
// @Test
// void testLoadPasswordProtectedPdfFromInputStream() throws IOException {
// try (InputStream is = getClass().getResourceAsStream("/protected.pdf")) {
// assertNotNull(is, "protected.pdf must be present in src/test/resources");
// try (PDDocument doc = factory.load(is, "test123")) {
// assertNotNull(doc);
// }
// }
// }
//
// @Test
// void testLoadPasswordProtectedPdfFromMultipart() throws IOException {
// try (InputStream is = getClass().getResourceAsStream("/protected.pdf")) {
// assertNotNull(is, "protected.pdf must be present in src/test/resources");
// byte[] bytes = is.readAllBytes();
// MockMultipartFile file = new MockMultipartFile("file", "protected.pdf",
// "application/pdf", bytes);
// try (PDDocument doc = factory.load(file, "test123")) {
// assertNotNull(doc);
// }
// }
// }
@Test
void testLoadReadOnlySkipsPostProcessing() throws IOException {
PdfMetadataService mockService = mock(PdfMetadataService.class);
CustomPDFDocumentFactory readOnlyFactory = new CustomPDFDocumentFactory(mockService);
byte[] bytes = inflatePdf(basePdfBytes, 5);
try (PDDocument doc = readOnlyFactory.load(bytes, true)) {
assertNotNull(doc);
verify(mockService, never()).setDefaultMetadata(any());
}
}
@Test
void testCreateNewDocument() throws IOException {
try (PDDocument doc = factory.createNewDocument()) {
assertNotNull(doc);
}
}
@Test
void testCreateNewDocumentBasedOnOldDocument() throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, 5);
try (PDDocument oldDoc = Loader.loadPDF(inflated);
PDDocument newDoc = factory.createNewDocumentBasedOnOldDocument(oldDoc)) {
assertNotNull(newDoc);
}
}
@Test
void testLoadToBytesRoundTrip() throws IOException {
byte[] inflated = inflatePdf(basePdfBytes, 5);
File file = writeTempFile(inflated);
byte[] resultBytes = factory.loadToBytes(file);
try (PDDocument doc = Loader.loadPDF(resultBytes)) {
assertNotNull(doc);
assertTrue(doc.getNumberOfPages() > 0);
}
}
@Test
void testSaveToBytesAndReload() throws IOException {
try (PDDocument doc = Loader.loadPDF(basePdfBytes)) {
byte[] saved = factory.saveToBytes(doc);
try (PDDocument reloaded = Loader.loadPDF(saved)) {
assertNotNull(reloaded);
assertEquals(doc.getNumberOfPages(), reloaded.getNumberOfPages());
}
}
}
@Test
void testCreateNewBytesBasedOnOldDocument() throws IOException {
byte[] newBytes = factory.createNewBytesBasedOnOldDocument(basePdfBytes);
assertNotNull(newBytes);
assertTrue(newBytes.length > 0);
}
private File writeTempFile(byte[] content) throws IOException {
File file = Files.createTempFile("pdf-test-", ".pdf").toFile();
Files.write(file.toPath(), content);
return file;
}
@BeforeEach
void cleanup() {
System.gc();
}
}

View File

@ -1,190 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import static org.mockito.AdditionalAnswers.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.web.multipart.MultipartFile;
class FileStorageTest {
@TempDir
Path tempDir;
@Mock
private FileOrUploadService fileOrUploadService;
@InjectMocks
private FileStorage fileStorage;
private MultipartFile mockFile;
@BeforeEach
void setUp() {
MockitoAnnotations.openMocks(this);
ReflectionTestUtils.setField(fileStorage, "tempDirPath", tempDir.toString());
// Create a mock MultipartFile
mockFile = mock(MultipartFile.class);
when(mockFile.getOriginalFilename()).thenReturn("test.pdf");
when(mockFile.getContentType()).thenReturn("application/pdf");
}
@Test
void testStoreFile() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
when(mockFile.getBytes()).thenReturn(fileContent);
// Set up mock to handle transferTo by writing the file
doAnswer(invocation -> {
java.io.File file = invocation.getArgument(0);
Files.write(file.toPath(), fileContent);
return null;
}).when(mockFile).transferTo(any(java.io.File.class));
// Act
String fileId = fileStorage.storeFile(mockFile);
// Assert
assertNotNull(fileId);
assertTrue(Files.exists(tempDir.resolve(fileId)));
verify(mockFile).transferTo(any(java.io.File.class));
}
@Test
void testStoreBytes() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
String originalName = "test.pdf";
// Act
String fileId = fileStorage.storeBytes(fileContent, originalName);
// Assert
assertNotNull(fileId);
assertTrue(Files.exists(tempDir.resolve(fileId)));
assertArrayEquals(fileContent, Files.readAllBytes(tempDir.resolve(fileId)));
}
@Test
void testRetrieveFile() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
String fileId = UUID.randomUUID().toString();
Path filePath = tempDir.resolve(fileId);
Files.write(filePath, fileContent);
MultipartFile expectedFile = mock(MultipartFile.class);
when(fileOrUploadService.toMockMultipartFile(eq(fileId), eq(fileContent)))
.thenReturn(expectedFile);
// Act
MultipartFile result = fileStorage.retrieveFile(fileId);
// Assert
assertSame(expectedFile, result);
verify(fileOrUploadService).toMockMultipartFile(eq(fileId), eq(fileContent));
}
@Test
void testRetrieveBytes() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
String fileId = UUID.randomUUID().toString();
Path filePath = tempDir.resolve(fileId);
Files.write(filePath, fileContent);
// Act
byte[] result = fileStorage.retrieveBytes(fileId);
// Assert
assertArrayEquals(fileContent, result);
}
@Test
void testRetrieveFile_FileNotFound() {
// Arrange
String nonExistentFileId = "non-existent-file";
// Act & Assert
assertThrows(IOException.class, () -> fileStorage.retrieveFile(nonExistentFileId));
}
@Test
void testRetrieveBytes_FileNotFound() {
// Arrange
String nonExistentFileId = "non-existent-file";
// Act & Assert
assertThrows(IOException.class, () -> fileStorage.retrieveBytes(nonExistentFileId));
}
@Test
void testDeleteFile() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
String fileId = UUID.randomUUID().toString();
Path filePath = tempDir.resolve(fileId);
Files.write(filePath, fileContent);
// Act
boolean result = fileStorage.deleteFile(fileId);
// Assert
assertTrue(result);
assertFalse(Files.exists(filePath));
}
@Test
void testDeleteFile_FileNotFound() {
// Arrange
String nonExistentFileId = "non-existent-file";
// Act
boolean result = fileStorage.deleteFile(nonExistentFileId);
// Assert
assertFalse(result);
}
@Test
void testFileExists() throws IOException {
// Arrange
byte[] fileContent = "Test PDF content".getBytes();
String fileId = UUID.randomUUID().toString();
Path filePath = tempDir.resolve(fileId);
Files.write(filePath, fileContent);
// Act
boolean result = fileStorage.fileExists(fileId);
// Assert
assertTrue(result);
}
@Test
void testFileExists_FileNotFound() {
// Arrange
String nonExistentFileId = "non-existent-file";
// Act
boolean result = fileStorage.fileExists(nonExistentFileId);
// Assert
assertFalse(result);
}
}

View File

@ -1,202 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.util.ReflectionTestUtils;
import jakarta.servlet.http.HttpServletRequest;
import stirling.software.common.model.job.JobProgress;
import stirling.software.common.model.job.JobResponse;
@ExtendWith(MockitoExtension.class)
class JobExecutorServiceTest {
private JobExecutorService jobExecutorService;
@Mock
private TaskManager taskManager;
@Mock
private FileStorage fileStorage;
@Mock
private HttpServletRequest request;
@Mock
private ResourceMonitor resourceMonitor;
@Mock
private JobQueue jobQueue;
@Captor
private ArgumentCaptor<String> jobIdCaptor;
@BeforeEach
void setUp() {
// Initialize the service manually with all its dependencies
jobExecutorService = new JobExecutorService(
taskManager,
fileStorage,
request,
resourceMonitor,
jobQueue,
30000L, // asyncRequestTimeoutMs
"30m" // sessionTimeout
);
}
@Test
void shouldRunSyncJobSuccessfully() throws Exception {
// Given
Supplier<Object> work = () -> "test-result";
// When
ResponseEntity<?> response = jobExecutorService.runJobGeneric(false, work);
// Then
assertEquals(HttpStatus.OK, response.getStatusCode());
assertEquals("test-result", response.getBody());
// Verify request attribute was set with jobId
verify(request).setAttribute(eq("jobId"), anyString());
}
@Test
void shouldRunAsyncJobSuccessfully() throws Exception {
// Given
Supplier<Object> work = () -> "test-result";
// When
ResponseEntity<?> response = jobExecutorService.runJobGeneric(true, work);
// Then
assertEquals(HttpStatus.OK, response.getStatusCode());
assertTrue(response.getBody() instanceof JobResponse);
JobResponse<?> jobResponse = (JobResponse<?>) response.getBody();
assertTrue(jobResponse.isAsync());
assertNotNull(jobResponse.getJobId());
// Verify task manager was called
verify(taskManager).createTask(jobIdCaptor.capture());
}
@Test
void shouldHandleSyncJobError() {
// Given
Supplier<Object> work = () -> {
throw new RuntimeException("Test error");
};
// When
ResponseEntity<?> response = jobExecutorService.runJobGeneric(false, work);
// Then
assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, response.getStatusCode());
@SuppressWarnings("unchecked")
Map<String, String> errorMap = (Map<String, String>) response.getBody();
assertEquals("Job failed: Test error", errorMap.get("error"));
}
@Test
void shouldQueueJobWhenResourcesLimited() {
// Given
Supplier<Object> work = () -> "test-result";
CompletableFuture<ResponseEntity<?>> future = new CompletableFuture<>();
// Configure resourceMonitor to indicate job should be queued
when(resourceMonitor.shouldQueueJob(80)).thenReturn(true);
// Configure jobQueue to return our future
when(jobQueue.queueJob(anyString(), eq(80), any(), anyLong())).thenReturn(future);
// When
ResponseEntity<?> response = jobExecutorService.runJobGeneric(
true, work, 5000, true, 80);
// Then
assertEquals(HttpStatus.OK, response.getStatusCode());
assertTrue(response.getBody() instanceof JobResponse);
// Verify job was queued
verify(jobQueue).queueJob(anyString(), eq(80), any(), eq(5000L));
verify(taskManager).createTask(anyString());
}
@Test
void shouldUseCustomTimeoutWhenProvided() throws Exception {
// Given
Supplier<Object> work = () -> "test-result";
long customTimeout = 60000L;
// Use reflection to access the private executeWithTimeout method
java.lang.reflect.Method executeMethod = JobExecutorService.class
.getDeclaredMethod("executeWithTimeout", Supplier.class, long.class);
executeMethod.setAccessible(true);
// Create a spy on the JobExecutorService to verify method calls
JobExecutorService spy = Mockito.spy(jobExecutorService);
// When
spy.runJobGeneric(false, work, customTimeout);
// Then
verify(spy).runJobGeneric(eq(false), any(Supplier.class), eq(customTimeout));
}
@Test
void shouldHandleTimeout() throws Exception {
// Given
Supplier<Object> work = () -> {
try {
Thread.sleep(100); // Simulate long-running job
return "test-result";
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
};
// Use reflection to access the private executeWithTimeout method
java.lang.reflect.Method executeMethod = JobExecutorService.class
.getDeclaredMethod("executeWithTimeout", Supplier.class, long.class);
executeMethod.setAccessible(true);
// When/Then
try {
executeMethod.invoke(jobExecutorService, work, 1L); // Very short timeout
} catch (Exception e) {
assertTrue(e.getCause() instanceof TimeoutException);
}
}
}

View File

@ -1,102 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import stirling.software.common.model.job.JobProgress;
import stirling.software.common.service.ResourceMonitor.ResourceStatus;
@ExtendWith(MockitoExtension.class)
class JobQueueTest {
private JobQueue jobQueue;
@Mock
private ResourceMonitor resourceMonitor;
private final AtomicReference<ResourceStatus> statusRef = new AtomicReference<>(ResourceStatus.OK);
@BeforeEach
void setUp() {
// Mark stubbing as lenient to avoid UnnecessaryStubbingException
lenient().when(resourceMonitor.calculateDynamicQueueCapacity(anyInt(), anyInt())).thenReturn(10);
lenient().when(resourceMonitor.getCurrentStatus()).thenReturn(statusRef);
// Initialize JobQueue with mocked ResourceMonitor
jobQueue = new JobQueue(resourceMonitor);
}
@Test
void shouldQueueJob() {
String jobId = "test-job-1";
int resourceWeight = 50;
Supplier<Object> work = () -> "test-result";
long timeoutMs = 1000;
jobQueue.queueJob(jobId, resourceWeight, work, timeoutMs);
assertTrue(jobQueue.isJobQueued(jobId));
assertEquals(1, jobQueue.getTotalQueuedJobs());
}
@Test
void shouldCancelJob() {
String jobId = "test-job-2";
Supplier<Object> work = () -> "test-result";
jobQueue.queueJob(jobId, 50, work, 1000);
boolean cancelled = jobQueue.cancelJob(jobId);
assertTrue(cancelled);
assertFalse(jobQueue.isJobQueued(jobId));
}
@Test
void shouldGetQueueStats() {
when(resourceMonitor.getCurrentStatus()).thenReturn(statusRef);
jobQueue.queueJob("job1", 50, () -> "ok", 1000);
jobQueue.queueJob("job2", 50, () -> "ok", 1000);
jobQueue.cancelJob("job2");
Map<String, Object> stats = jobQueue.getQueueStats();
assertEquals(2, stats.get("totalQueuedJobs"));
assertTrue(stats.containsKey("queuedJobs"));
assertTrue(stats.containsKey("resourceStatus"));
}
@Test
void shouldCalculateQueueCapacity() {
when(resourceMonitor.calculateDynamicQueueCapacity(5, 2)).thenReturn(8);
int capacity = resourceMonitor.calculateDynamicQueueCapacity(5, 2);
assertEquals(8, capacity);
}
@Test
void shouldCheckIfJobIsQueued() {
String jobId = "job-123";
Supplier<Object> work = () -> "hello";
jobQueue.queueJob(jobId, 40, work, 500);
assertTrue(jobQueue.isJobQueued(jobId));
assertFalse(jobQueue.isJobQueued("nonexistent"));
}
}

View File

@ -1,137 +0,0 @@
package stirling.software.common.service;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.OperatingSystemMXBean;
import java.time.Instant;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.test.util.ReflectionTestUtils;
import stirling.software.common.service.ResourceMonitor.ResourceMetrics;
import stirling.software.common.service.ResourceMonitor.ResourceStatus;
@ExtendWith(MockitoExtension.class)
class ResourceMonitorTest {
@InjectMocks
private ResourceMonitor resourceMonitor;
@Mock
private OperatingSystemMXBean osMXBean;
@Mock
private MemoryMXBean memoryMXBean;
@Spy
private AtomicReference<ResourceStatus> currentStatus = new AtomicReference<>(ResourceStatus.OK);
@Spy
private AtomicReference<ResourceMetrics> latestMetrics = new AtomicReference<>(new ResourceMetrics());
@BeforeEach
void setUp() {
// Set thresholds for testing
ReflectionTestUtils.setField(resourceMonitor, "memoryCriticalThreshold", 0.9);
ReflectionTestUtils.setField(resourceMonitor, "memoryHighThreshold", 0.75);
ReflectionTestUtils.setField(resourceMonitor, "cpuCriticalThreshold", 0.9);
ReflectionTestUtils.setField(resourceMonitor, "cpuHighThreshold", 0.75);
ReflectionTestUtils.setField(resourceMonitor, "osMXBean", osMXBean);
ReflectionTestUtils.setField(resourceMonitor, "memoryMXBean", memoryMXBean);
ReflectionTestUtils.setField(resourceMonitor, "currentStatus", currentStatus);
ReflectionTestUtils.setField(resourceMonitor, "latestMetrics", latestMetrics);
}
@Test
void shouldCalculateDynamicQueueCapacity() {
// Given
int baseCapacity = 10;
int minCapacity = 2;
// Mock current status as OK
currentStatus.set(ResourceStatus.OK);
// When
int capacity = resourceMonitor.calculateDynamicQueueCapacity(baseCapacity, minCapacity);
// Then
assertEquals(baseCapacity, capacity, "With OK status, capacity should equal base capacity");
// Given
currentStatus.set(ResourceStatus.WARNING);
// When
capacity = resourceMonitor.calculateDynamicQueueCapacity(baseCapacity, minCapacity);
// Then
assertEquals(6, capacity, "With WARNING status, capacity should be reduced to 60%");
// Given
currentStatus.set(ResourceStatus.CRITICAL);
// When
capacity = resourceMonitor.calculateDynamicQueueCapacity(baseCapacity, minCapacity);
// Then
assertEquals(3, capacity, "With CRITICAL status, capacity should be reduced to 30%");
// Test minimum capacity enforcement
assertEquals(minCapacity, resourceMonitor.calculateDynamicQueueCapacity(1, minCapacity),
"Should never go below minimum capacity");
}
@ParameterizedTest
@CsvSource({
"10, OK, false", // Light job, OK status
"10, WARNING, false", // Light job, WARNING status
"10, CRITICAL, true", // Light job, CRITICAL status
"30, OK, false", // Medium job, OK status
"30, WARNING, true", // Medium job, WARNING status
"30, CRITICAL, true", // Medium job, CRITICAL status
"80, OK, true", // Heavy job, OK status
"80, WARNING, true", // Heavy job, WARNING status
"80, CRITICAL, true" // Heavy job, CRITICAL status
})
void shouldQueueJobBasedOnWeightAndStatus(int weight, ResourceStatus status, boolean shouldQueue) {
// Given
currentStatus.set(status);
// When
boolean result = resourceMonitor.shouldQueueJob(weight);
// Then
assertEquals(shouldQueue, result,
String.format("For weight %d and status %s, shouldQueue should be %s",
weight, status, shouldQueue));
}
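    // Illustrative only (not the real ResourceMonitor code): one decision rule that
    // reproduces exactly the CsvSource rows above. The weight cut-offs 30 and 80 are
    // inferred from the test data, not taken from the implementation.
    static boolean shouldQueueJobSketch(int weight, ResourceStatus status) {
        switch (status) {
            case CRITICAL:
                return true;         // under critical load every job is queued
            case WARNING:
                return weight >= 30; // medium and heavy jobs are queued
            default:                 // OK status
                return weight >= 80; // only heavy jobs are queued
        }
    }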
@Test
void resourceMetricsShouldDetectStaleState() {
// Given
Instant now = Instant.now();
Instant pastInstant = now.minusMillis(6000);
ResourceMetrics staleMetrics = new ResourceMetrics(0.5, 0.5, 1024, 2048, 4096, pastInstant);
ResourceMetrics freshMetrics = new ResourceMetrics(0.5, 0.5, 1024, 2048, 4096, now);
// When/Then
assertTrue(staleMetrics.isStale(5000), "Metrics from 6 seconds ago should be stale with 5s threshold");
assertFalse(freshMetrics.isStale(5000), "Fresh metrics should not be stale");
}
}

View File

@ -1,31 +0,0 @@
package stirling.software.common.service;
import org.apache.pdfbox.io.RandomAccessStreamCache.StreamCacheCreateFunction;
class SpyPDFDocumentFactory extends CustomPDFDocumentFactory {
enum StrategyType {
MEMORY_ONLY,
MIXED,
TEMP_FILE
}
public StrategyType lastStrategyUsed;
public SpyPDFDocumentFactory(PdfMetadataService service) {
super(service);
}
@Override
public StreamCacheCreateFunction getStreamCacheFunction(long contentSize) {
StrategyType type;
if (contentSize < 10 * 1024 * 1024) {
type = StrategyType.MEMORY_ONLY;
} else if (contentSize < 50 * 1024 * 1024) {
type = StrategyType.MIXED;
} else {
type = StrategyType.TEMP_FILE;
}
this.lastStrategyUsed = type;
return super.getStreamCacheFunction(contentSize); // delegate to real behavior
}
}
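The spy above records which stream-cache strategy the parent CustomPDFDocumentFactory selected for a given content size: in-memory below 10 MB, mixed up to 50 MB, and temp-file backed beyond that. The following is a minimal, hypothetical sketch (not a file from this repository) of how a test in the same package could pin those thresholds down; it assumes only that PdfMetadataService can be mocked and that the parent method needs no extra setup.
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;
import org.junit.jupiter.api.Test;
class SpyPDFDocumentFactoryStrategySketchTest {
    private final SpyPDFDocumentFactory spy =
            new SpyPDFDocumentFactory(mock(PdfMetadataService.class));
    @Test
    void selectsCacheStrategyByContentSize() {
        spy.getStreamCacheFunction(5L * 1024 * 1024);   // 5 MB -> kept fully in memory
        assertEquals(SpyPDFDocumentFactory.StrategyType.MEMORY_ONLY, spy.lastStrategyUsed);
        spy.getStreamCacheFunction(20L * 1024 * 1024);  // 20 MB -> mixed memory/disk cache
        assertEquals(SpyPDFDocumentFactory.StrategyType.MIXED, spy.lastStrategyUsed);
        spy.getStreamCacheFunction(100L * 1024 * 1024); // 100 MB -> spilled to a temp file
        assertEquals(SpyPDFDocumentFactory.StrategyType.TEMP_FILE, spy.lastStrategyUsed);
    }
}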

View File

@ -1,309 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import java.time.LocalDateTime;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.test.util.ReflectionTestUtils;
import stirling.software.common.model.job.JobResult;
import stirling.software.common.model.job.JobStats;
import stirling.software.common.model.job.ResultFile;
class TaskManagerTest {
@Mock
private FileStorage fileStorage;
@InjectMocks
private TaskManager taskManager;
private AutoCloseable closeable;
@BeforeEach
void setUp() {
closeable = MockitoAnnotations.openMocks(this);
ReflectionTestUtils.setField(taskManager, "jobResultExpiryMinutes", 30);
}
@AfterEach
void tearDown() throws Exception {
closeable.close();
}
@Test
void testCreateTask() {
// Act
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertEquals(jobId, result.getJobId());
assertFalse(result.isComplete());
assertNotNull(result.getCreatedAt());
}
@Test
void testSetResult() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
Object resultObject = "Test result";
// Act
taskManager.setResult(jobId, resultObject);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals(resultObject, result.getResult());
assertNotNull(result.getCompletedAt());
}
@Test
void testSetFileResult() throws Exception {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
String fileId = "file-id";
String originalFileName = "test.pdf";
String contentType = "application/pdf";
long fileSize = 1024L;
// Mock the fileStorage.getFileSize() call
when(fileStorage.getFileSize(fileId)).thenReturn(fileSize);
// Act
taskManager.setFileResult(jobId, fileId, originalFileName, contentType);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertTrue(result.isComplete());
assertTrue(result.hasFiles());
assertFalse(result.hasMultipleFiles());
var resultFiles = result.getAllResultFiles();
assertEquals(1, resultFiles.size());
ResultFile resultFile = resultFiles.get(0);
assertEquals(fileId, resultFile.getFileId());
assertEquals(originalFileName, resultFile.getFileName());
assertEquals(contentType, resultFile.getContentType());
assertEquals(fileSize, resultFile.getFileSize());
assertNotNull(result.getCompletedAt());
}
@Test
void testSetError() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
String errorMessage = "Test error";
// Act
taskManager.setError(jobId, errorMessage);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals(errorMessage, result.getError());
assertNotNull(result.getCompletedAt());
}
@Test
void testSetComplete_WithExistingResult() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
Object resultObject = "Test result";
taskManager.setResult(jobId, resultObject);
// Act
taskManager.setComplete(jobId);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals(resultObject, result.getResult());
}
@Test
void testSetComplete_WithoutExistingResult() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
// Act
taskManager.setComplete(jobId);
// Assert
JobResult result = taskManager.getJobResult(jobId);
assertNotNull(result);
assertTrue(result.isComplete());
assertEquals("Task completed successfully", result.getResult());
}
@Test
void testIsComplete() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
// Assert - not complete initially
assertFalse(taskManager.isComplete(jobId));
// Act - mark as complete
taskManager.setComplete(jobId);
// Assert - now complete
assertTrue(taskManager.isComplete(jobId));
}
@Test
void testGetJobStats() throws Exception {
// Arrange
// Mock fileStorage.getFileSize for file operations
when(fileStorage.getFileSize("file-id")).thenReturn(1024L);
// 1. Create active job
String activeJobId = "active-job";
taskManager.createTask(activeJobId);
// 2. Create completed successful job with file
String successFileJobId = "success-file-job";
taskManager.createTask(successFileJobId);
taskManager.setFileResult(successFileJobId, "file-id", "test.pdf", "application/pdf");
// 3. Create completed successful job without file
String successJobId = "success-job";
taskManager.createTask(successJobId);
taskManager.setResult(successJobId, "Result");
// 4. Create failed job
String failedJobId = "failed-job";
taskManager.createTask(failedJobId);
taskManager.setError(failedJobId, "Error message");
// Act
JobStats stats = taskManager.getJobStats();
// Assert
assertEquals(4, stats.getTotalJobs());
assertEquals(1, stats.getActiveJobs());
assertEquals(3, stats.getCompletedJobs());
assertEquals(1, stats.getFailedJobs());
assertEquals(2, stats.getSuccessfulJobs());
assertEquals(1, stats.getFileResultJobs());
assertNotNull(stats.getNewestActiveJobTime());
assertNotNull(stats.getOldestActiveJobTime());
assertTrue(stats.getAverageProcessingTimeMs() >= 0);
}
@Test
void testCleanupOldJobs() throws Exception {
// Arrange
// 1. Create a recent completed job
String recentJobId = "recent-job";
taskManager.createTask(recentJobId);
taskManager.setResult(recentJobId, "Result");
// 2. Create an old completed job with file result
String oldJobId = "old-job";
taskManager.createTask(oldJobId);
JobResult oldJob = taskManager.getJobResult(oldJobId);
// Manually set the completion time to be older than the expiry
LocalDateTime oldTime = LocalDateTime.now().minusHours(1);
ReflectionTestUtils.setField(oldJob, "completedAt", oldTime);
ReflectionTestUtils.setField(oldJob, "complete", true);
// Create a ResultFile and set it using the new approach
ResultFile resultFile = ResultFile.builder()
.fileId("file-id")
.fileName("test.pdf")
.contentType("application/pdf")
.fileSize(1024L)
.build();
ReflectionTestUtils.setField(oldJob, "resultFiles", java.util.List.of(resultFile));
when(fileStorage.deleteFile("file-id")).thenReturn(true);
// Obtain access to the private jobResults map
Map<String, JobResult> jobResultsMap = (Map<String, JobResult>) ReflectionTestUtils.getField(taskManager, "jobResults");
// 3. Create an active job
String activeJobId = "active-job";
taskManager.createTask(activeJobId);
// Verify all jobs are in the map
assertTrue(jobResultsMap.containsKey(recentJobId));
assertTrue(jobResultsMap.containsKey(oldJobId));
assertTrue(jobResultsMap.containsKey(activeJobId));
// Act
taskManager.cleanupOldJobs();
// Assert - the old job should be removed
assertFalse(jobResultsMap.containsKey(oldJobId));
assertTrue(jobResultsMap.containsKey(recentJobId));
assertTrue(jobResultsMap.containsKey(activeJobId));
verify(fileStorage).deleteFile("file-id");
}
@Test
void testShutdown() throws Exception {
// This mainly tests that the shutdown method doesn't throw exceptions
taskManager.shutdown();
// Verify the executor service is shutdown
// This is difficult to test directly, but we can verify it doesn't throw exceptions
}
@Test
void testAddNote() {
// Arrange
String jobId = UUID.randomUUID().toString();
taskManager.createTask(jobId);
String note = "Test note";
// Act
boolean result = taskManager.addNote(jobId, note);
// Assert
assertTrue(result);
JobResult jobResult = taskManager.getJobResult(jobId);
assertNotNull(jobResult);
assertNotNull(jobResult.getNotes());
assertEquals(1, jobResult.getNotes().size());
assertEquals(note, jobResult.getNotes().get(0));
}
@Test
void testAddNote_NonExistentJob() {
// Arrange
String jobId = "non-existent-job";
String note = "Test note";
// Act
boolean result = taskManager.addNote(jobId, note);
// Assert
assertFalse(result);
}
}

View File

@ -1,464 +0,0 @@
package stirling.software.common.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.*;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.MockitoAnnotations;
import org.springframework.test.util.ReflectionTestUtils;
import stirling.software.common.model.ApplicationProperties;
import stirling.software.common.util.TempFileManager;
import stirling.software.common.util.TempFileRegistry;
/**
* Tests for the TempFileCleanupService, focusing on its pattern-matching and cleanup logic.
*/
public class TempFileCleanupServiceTest {
@TempDir
Path tempDir;
@Mock
private TempFileRegistry registry;
@Mock
private TempFileManager tempFileManager;
@Mock
private ApplicationProperties applicationProperties;
@Mock
private ApplicationProperties.System system;
@Mock
private ApplicationProperties.TempFileManagement tempFileManagement;
@InjectMocks
private TempFileCleanupService cleanupService;
private Path systemTempDir;
private Path customTempDir;
private Path libreOfficeTempDir;
@BeforeEach
public void setup() throws IOException {
MockitoAnnotations.openMocks(this);
// Create test directories
systemTempDir = tempDir.resolve("systemTemp");
customTempDir = tempDir.resolve("customTemp");
libreOfficeTempDir = tempDir.resolve("libreOfficeTemp");
Files.createDirectories(systemTempDir);
Files.createDirectories(customTempDir);
Files.createDirectories(libreOfficeTempDir);
// Configure ApplicationProperties mocks
when(applicationProperties.getSystem()).thenReturn(system);
when(system.getTempFileManagement()).thenReturn(tempFileManagement);
when(tempFileManagement.getBaseTmpDir()).thenReturn(customTempDir.toString());
when(tempFileManagement.getLibreofficeDir()).thenReturn(libreOfficeTempDir.toString());
when(tempFileManagement.getSystemTempDir()).thenReturn(systemTempDir.toString());
when(tempFileManagement.isStartupCleanup()).thenReturn(false);
when(tempFileManagement.isCleanupSystemTemp()).thenReturn(false);
when(tempFileManagement.getCleanupIntervalMinutes()).thenReturn(30L);
// Set machineType using reflection (still needed for this field)
ReflectionTestUtils.setField(cleanupService, "machineType", "Standard");
when(tempFileManager.getMaxAgeMillis()).thenReturn(3600000L); // 1 hour
}
@Test
public void testScheduledCleanup_RegisteredFiles() {
// Arrange
when(tempFileManager.cleanupOldTempFiles(anyLong())).thenReturn(5); // 5 files deleted
Set<Path> registeredDirs = new HashSet<>();
registeredDirs.add(tempDir.resolve("registeredDir"));
when(registry.getTempDirectories()).thenReturn(registeredDirs);
// Act
cleanupService.scheduledCleanup();
// Assert
verify(tempFileManager).cleanupOldTempFiles(anyLong());
verify(registry, times(1)).getTempDirectories();
}
@Test
public void testCleanupTempFilePatterns() throws IOException {
// Arrange - Create various temp files
Path ourTempFile1 = Files.createFile(systemTempDir.resolve("output_123.pdf"));
Path ourTempFile2 = Files.createFile(systemTempDir.resolve("compressedPDF456.pdf"));
Path ourTempFile3 = Files.createFile(customTempDir.resolve("stirling-pdf-789.tmp"));
Path ourTempFile4 = Files.createFile(customTempDir.resolve("pdf-save-123-456.tmp"));
Path ourTempFile5 = Files.createFile(libreOfficeTempDir.resolve("input_file.pdf"));
// Old temporary files
Path oldTempFile = Files.createFile(systemTempDir.resolve("output_old.pdf"));
// System temp files that should be cleaned in container mode
Path sysTempFile1 = Files.createFile(systemTempDir.resolve("lu123abc.tmp"));
Path sysTempFile2 = Files.createFile(customTempDir.resolve("ocr_process123"));
Path sysTempFile3 = Files.createFile(customTempDir.resolve("tmp_upload.tmp"));
// Files that should be preserved
Path jettyFile1 = Files.createFile(systemTempDir.resolve("jetty-123.tmp"));
Path jettyFile2 = Files.createFile(systemTempDir.resolve("something-with-jetty-inside.tmp"));
Path regularFile = Files.createFile(systemTempDir.resolve("important.txt"));
// Create a nested directory with temp files
Path nestedDir = Files.createDirectories(systemTempDir.resolve("nested"));
Path nestedTempFile = Files.createFile(nestedDir.resolve("output_nested.pdf"));
// Empty file (special case)
Path emptyFile = Files.createFile(systemTempDir.resolve("empty.tmp"));
// Configure mock registry to say these files aren't registered
when(registry.contains(any(File.class))).thenReturn(false);
// The set of files that will be deleted in our test
Set<Path> deletedFiles = new HashSet<>();
// Use MockedStatic to mock Files operations
try (MockedStatic<Files> mockedFiles = mockStatic(Files.class)) {
// Mock Files.list for each directory we'll process
mockedFiles.when(() -> Files.list(eq(systemTempDir)))
.thenReturn(Stream.of(
ourTempFile1, ourTempFile2, oldTempFile, sysTempFile1,
jettyFile1, jettyFile2, regularFile, emptyFile, nestedDir));
mockedFiles.when(() -> Files.list(eq(customTempDir)))
.thenReturn(Stream.of(ourTempFile3, ourTempFile4, sysTempFile2, sysTempFile3));
mockedFiles.when(() -> Files.list(eq(libreOfficeTempDir)))
.thenReturn(Stream.of(ourTempFile5));
mockedFiles.when(() -> Files.list(eq(nestedDir)))
.thenReturn(Stream.of(nestedTempFile));
// Configure Files.isDirectory for each path
mockedFiles.when(() -> Files.isDirectory(eq(nestedDir))).thenReturn(true);
mockedFiles.when(() -> Files.isDirectory(any(Path.class))).thenReturn(false);
// Configure Files.exists to return true for all paths
mockedFiles.when(() -> Files.exists(any(Path.class))).thenReturn(true);
// Configure Files.getLastModifiedTime to return different times based on file names
mockedFiles.when(() -> Files.getLastModifiedTime(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
String fileName = path.getFileName().toString();
// For files with "old" in the name, return a timestamp older than maxAgeMillis
if (fileName.contains("old")) {
return FileTime.fromMillis(System.currentTimeMillis() - 5000000);
}
// For empty.tmp file, return a timestamp older than 5 minutes (for empty file test)
else if (fileName.equals("empty.tmp")) {
return FileTime.fromMillis(System.currentTimeMillis() - 6 * 60 * 1000);
}
// For all other files, return a recent timestamp
else {
return FileTime.fromMillis(System.currentTimeMillis() - 60000); // 1 minute ago
}
});
// Configure Files.size to return different sizes based on file names
mockedFiles.when(() -> Files.size(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
String fileName = path.getFileName().toString();
// Return 0 bytes for the empty file
if (fileName.equals("empty.tmp")) {
return 0L;
}
// Return normal size for all other files
else {
return 1024L; // 1 KB
}
});
// For deleteIfExists, track which files would be deleted
mockedFiles.when(() -> Files.deleteIfExists(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
deletedFiles.add(path);
return true;
});
// Act - set containerMode to false for this test
invokeCleanupDirectoryStreaming(systemTempDir, false, 0, 3600000);
invokeCleanupDirectoryStreaming(customTempDir, false, 0, 3600000);
invokeCleanupDirectoryStreaming(libreOfficeTempDir, false, 0, 3600000);
// Assert - Only old temp files and empty files should be deleted
assertTrue(deletedFiles.contains(oldTempFile), "Old temp file should be deleted");
assertTrue(deletedFiles.contains(emptyFile), "Empty file should be deleted");
// Regular temp files should not be deleted because they're too new
assertFalse(deletedFiles.contains(ourTempFile1), "Recent temp file should be preserved");
assertFalse(deletedFiles.contains(ourTempFile2), "Recent temp file should be preserved");
assertFalse(deletedFiles.contains(ourTempFile3), "Recent temp file should be preserved");
assertFalse(deletedFiles.contains(ourTempFile4), "Recent temp file should be preserved");
assertFalse(deletedFiles.contains(ourTempFile5), "Recent temp file should be preserved");
// System temp files should not be deleted in non-container mode
assertFalse(deletedFiles.contains(sysTempFile1), "System temp file should be preserved in non-container mode");
assertFalse(deletedFiles.contains(sysTempFile2), "System temp file should be preserved in non-container mode");
assertFalse(deletedFiles.contains(sysTempFile3), "System temp file should be preserved in non-container mode");
// Jetty files and regular files should never be deleted
assertFalse(deletedFiles.contains(jettyFile1), "Jetty file should be preserved");
assertFalse(deletedFiles.contains(jettyFile2), "File with jetty in name should be preserved");
assertFalse(deletedFiles.contains(regularFile), "Regular file should be preserved");
}
}
@Test
public void testContainerModeCleanup() throws IOException {
// Arrange - Create various temp files
Path ourTempFile = Files.createFile(systemTempDir.resolve("output_123.pdf"));
Path sysTempFile = Files.createFile(systemTempDir.resolve("lu123abc.tmp"));
Path regularFile = Files.createFile(systemTempDir.resolve("important.txt"));
// Configure mock registry to say these files aren't registered
when(registry.contains(any(File.class))).thenReturn(false);
// The set of files that will be deleted in our test
Set<Path> deletedFiles = new HashSet<>();
// Use MockedStatic to mock Files operations
try (MockedStatic<Files> mockedFiles = mockStatic(Files.class)) {
// Mock Files.list for systemTempDir
mockedFiles.when(() -> Files.list(eq(systemTempDir)))
.thenReturn(Stream.of(ourTempFile, sysTempFile, regularFile));
// Configure Files.isDirectory
mockedFiles.when(() -> Files.isDirectory(any(Path.class))).thenReturn(false);
// Configure Files.exists
mockedFiles.when(() -> Files.exists(any(Path.class))).thenReturn(true);
// Configure Files.getLastModifiedTime to return recent timestamps
mockedFiles.when(() -> Files.getLastModifiedTime(any(Path.class)))
.thenReturn(FileTime.fromMillis(System.currentTimeMillis() - 60000)); // 1 minute ago
// Configure Files.size to return normal size
mockedFiles.when(() -> Files.size(any(Path.class)))
.thenReturn(1024L); // 1 KB
// For deleteIfExists, track which files would be deleted
mockedFiles.when(() -> Files.deleteIfExists(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
deletedFiles.add(path);
return true;
});
// Act - set containerMode to true and maxAgeMillis to 0 for container startup cleanup
invokeCleanupDirectoryStreaming(systemTempDir, true, 0, 0);
// Assert - In container mode, both our temp files and system temp files should be deleted
// regardless of age (when maxAgeMillis is 0)
assertTrue(deletedFiles.contains(ourTempFile), "Our temp file should be deleted in container mode");
assertTrue(deletedFiles.contains(sysTempFile), "System temp file should be deleted in container mode");
assertFalse(deletedFiles.contains(regularFile), "Regular file should be preserved");
}
}
@Test
public void testEmptyFileHandling() throws IOException {
// Arrange - Create an empty file
Path emptyFile = Files.createFile(systemTempDir.resolve("empty.tmp"));
Path recentEmptyFile = Files.createFile(systemTempDir.resolve("recent_empty.tmp"));
// Configure mock registry to say these files aren't registered
when(registry.contains(any(File.class))).thenReturn(false);
// The set of files that will be deleted in our test
Set<Path> deletedFiles = new HashSet<>();
// Use MockedStatic to mock Files operations
try (MockedStatic<Files> mockedFiles = mockStatic(Files.class)) {
// Mock Files.list for systemTempDir
mockedFiles.when(() -> Files.list(eq(systemTempDir)))
.thenReturn(Stream.of(emptyFile, recentEmptyFile));
// Configure Files.isDirectory
mockedFiles.when(() -> Files.isDirectory(any(Path.class))).thenReturn(false);
// Configure Files.exists
mockedFiles.when(() -> Files.exists(any(Path.class))).thenReturn(true);
// Configure Files.getLastModifiedTime to return different times based on file names
mockedFiles.when(() -> Files.getLastModifiedTime(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
String fileName = path.getFileName().toString();
if (fileName.equals("empty.tmp")) {
// More than 5 minutes old
return FileTime.fromMillis(System.currentTimeMillis() - 6 * 60 * 1000);
} else {
// Less than 5 minutes old
return FileTime.fromMillis(System.currentTimeMillis() - 2 * 60 * 1000);
}
});
// Configure Files.size to return 0 for empty files
mockedFiles.when(() -> Files.size(any(Path.class)))
.thenReturn(0L);
// For deleteIfExists, track which files would be deleted
mockedFiles.when(() -> Files.deleteIfExists(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
deletedFiles.add(path);
return true;
});
// Act
invokeCleanupDirectoryStreaming(systemTempDir, false, 0, 3600000);
// Assert
assertTrue(deletedFiles.contains(emptyFile),
"Empty file older than 5 minutes should be deleted");
assertFalse(deletedFiles.contains(recentEmptyFile),
"Empty file newer than 5 minutes should not be deleted");
}
}
@Test
public void testRecursiveDirectoryCleaning() throws IOException {
// Arrange - Create a nested directory structure with temp files
Path dir1 = Files.createDirectories(systemTempDir.resolve("dir1"));
Path dir2 = Files.createDirectories(dir1.resolve("dir2"));
Path dir3 = Files.createDirectories(dir2.resolve("dir3"));
Path tempFile1 = Files.createFile(dir1.resolve("output_1.pdf"));
Path tempFile2 = Files.createFile(dir2.resolve("output_2.pdf"));
Path tempFile3 = Files.createFile(dir3.resolve("output_old_3.pdf"));
// Configure mock registry to say these files aren't registered
when(registry.contains(any(File.class))).thenReturn(false);
// The set of files that will be deleted in our test
Set<Path> deletedFiles = new HashSet<>();
// Use MockedStatic to mock Files operations
try (MockedStatic<Files> mockedFiles = mockStatic(Files.class)) {
// Mock Files.list for each directory
mockedFiles.when(() -> Files.list(eq(systemTempDir)))
.thenReturn(Stream.of(dir1));
mockedFiles.when(() -> Files.list(eq(dir1)))
.thenReturn(Stream.of(tempFile1, dir2));
mockedFiles.when(() -> Files.list(eq(dir2)))
.thenReturn(Stream.of(tempFile2, dir3));
mockedFiles.when(() -> Files.list(eq(dir3)))
.thenReturn(Stream.of(tempFile3));
// Configure Files.isDirectory for each path
mockedFiles.when(() -> Files.isDirectory(eq(dir1))).thenReturn(true);
mockedFiles.when(() -> Files.isDirectory(eq(dir2))).thenReturn(true);
mockedFiles.when(() -> Files.isDirectory(eq(dir3))).thenReturn(true);
mockedFiles.when(() -> Files.isDirectory(eq(tempFile1))).thenReturn(false);
mockedFiles.when(() -> Files.isDirectory(eq(tempFile2))).thenReturn(false);
mockedFiles.when(() -> Files.isDirectory(eq(tempFile3))).thenReturn(false);
// Configure Files.exists to return true for all paths
mockedFiles.when(() -> Files.exists(any(Path.class))).thenReturn(true);
// Configure Files.getLastModifiedTime to return different times based on file names
mockedFiles.when(() -> Files.getLastModifiedTime(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
String fileName = path.getFileName().toString();
if (fileName.contains("old")) {
// Old file
return FileTime.fromMillis(System.currentTimeMillis() - 5000000);
} else {
// Recent file
return FileTime.fromMillis(System.currentTimeMillis() - 60000);
}
});
// Configure Files.size to return normal size
mockedFiles.when(() -> Files.size(any(Path.class)))
.thenReturn(1024L);
// For deleteIfExists, track which files would be deleted
mockedFiles.when(() -> Files.deleteIfExists(any(Path.class)))
.thenAnswer(invocation -> {
Path path = invocation.getArgument(0);
deletedFiles.add(path);
return true;
});
// Act
invokeCleanupDirectoryStreaming(systemTempDir, false, 0, 3600000);
// Debug - print what was deleted
System.out.println("Deleted files: " + deletedFiles);
System.out.println("Looking for: " + tempFile3);
// Assert
assertFalse(deletedFiles.contains(tempFile1), "Recent temp file should be preserved");
assertFalse(deletedFiles.contains(tempFile2), "Recent temp file should be preserved");
assertTrue(deletedFiles.contains(tempFile3), "Old temp file in nested directory should be deleted");
}
}
/**
* Helper method to invoke the private cleanupDirectoryStreaming method using reflection
*/
private void invokeCleanupDirectoryStreaming(Path directory, boolean containerMode, int depth, long maxAgeMillis)
throws IOException {
try {
// Create a consumer that tracks deleted files
AtomicInteger deleteCount = new AtomicInteger(0);
Consumer<Path> deleteCallback = path -> deleteCount.incrementAndGet();
// Get the method with updated signature
var method = TempFileCleanupService.class.getDeclaredMethod(
"cleanupDirectoryStreaming",
Path.class, boolean.class, int.class, long.class, boolean.class, Consumer.class);
method.setAccessible(true);
// Invoke the method with appropriate parameters
method.invoke(cleanupService, directory, containerMode, depth, maxAgeMillis, false, deleteCallback);
} catch (Exception e) {
throw new RuntimeException("Error invoking cleanupDirectoryStreaming", e);
}
}
// Matcher for exact path equality
private static Path eq(Path path) {
return argThat(arg -> arg != null && arg.equals(path));
}
}

View File

@ -1,205 +0,0 @@
package stirling.software.common.util;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Arrays;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import stirling.software.common.util.ProcessExecutor.ProcessExecutorResult;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.mockStatic;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
class CheckProgramInstallTest {
private MockedStatic<ProcessExecutor> mockProcessExecutor;
private ProcessExecutor mockExecutor;
@BeforeEach
void setUp() throws Exception {
// Reset static variables before each test
resetStaticFields();
// Set up mock for ProcessExecutor
mockExecutor = Mockito.mock(ProcessExecutor.class);
mockProcessExecutor = mockStatic(ProcessExecutor.class);
mockProcessExecutor
.when(() -> ProcessExecutor.getInstance(ProcessExecutor.Processes.PYTHON_OPENCV))
.thenReturn(mockExecutor);
}
@AfterEach
void tearDown() {
// Close the static mock to prevent memory leaks
if (mockProcessExecutor != null) {
mockProcessExecutor.close();
}
}
/** Reset static fields in the CheckProgramInstall class using reflection */
private void resetStaticFields() throws Exception {
Field pythonAvailableCheckedField =
CheckProgramInstall.class.getDeclaredField("pythonAvailableChecked");
pythonAvailableCheckedField.setAccessible(true);
pythonAvailableCheckedField.set(null, false);
Field availablePythonCommandField =
CheckProgramInstall.class.getDeclaredField("availablePythonCommand");
availablePythonCommandField.setAccessible(true);
availablePythonCommandField.set(null, null);
}
@Test
void testGetAvailablePythonCommand_WhenPython3IsAvailable()
throws IOException, InterruptedException {
// Arrange
ProcessExecutorResult result = Mockito.mock(ProcessExecutorResult.class);
when(result.getRc()).thenReturn(0);
when(result.getMessages()).thenReturn("Python 3.9.0");
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python3", "--version")))
.thenReturn(result);
// Act
String pythonCommand = CheckProgramInstall.getAvailablePythonCommand();
// Assert
assertEquals("python3", pythonCommand);
assertTrue(CheckProgramInstall.isPythonAvailable());
// Verify that the command was executed
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python3", "--version"));
}
@Test
void testGetAvailablePythonCommand_WhenPython3IsNotAvailableButPythonIs()
throws IOException, InterruptedException {
// Arrange
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python3", "--version")))
.thenThrow(new IOException("Command not found"));
ProcessExecutorResult result = Mockito.mock(ProcessExecutorResult.class);
when(result.getRc()).thenReturn(0);
when(result.getMessages()).thenReturn("Python 2.7.0");
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python", "--version")))
.thenReturn(result);
// Act
String pythonCommand = CheckProgramInstall.getAvailablePythonCommand();
// Assert
assertEquals("python", pythonCommand);
assertTrue(CheckProgramInstall.isPythonAvailable());
// Verify that both commands were attempted
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python3", "--version"));
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python", "--version"));
}
@Test
void testGetAvailablePythonCommand_WhenPythonReturnsNonZeroExitCode()
throws IOException, InterruptedException, Exception {
// Arrange
// Reset the static fields again to ensure clean state
resetStaticFields();
// Since we want to test the scenario where Python returns a non-zero exit code,
// we need to make sure both the python3 and python commands are mocked to fail
ProcessExecutorResult resultPython3 = Mockito.mock(ProcessExecutorResult.class);
when(resultPython3.getRc()).thenReturn(1); // Non-zero exit code
when(resultPython3.getMessages()).thenReturn("Error");
// Important: the CheckProgramInstall implementation only checks whether the
// command throws an exception; it does not inspect the return code.
// So we need to throw an exception instead
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python3", "--version")))
.thenThrow(new IOException("Command failed with non-zero exit code"));
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python", "--version")))
.thenThrow(new IOException("Command failed with non-zero exit code"));
// Act
String pythonCommand = CheckProgramInstall.getAvailablePythonCommand();
// Assert - Both commands throw exceptions, so no python is available
assertNull(pythonCommand);
assertFalse(CheckProgramInstall.isPythonAvailable());
}
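    // Sketch of the behaviour the tests above pin down (an assumption, not the actual
    // CheckProgramInstall source): try "python3 --version", then "python --version";
    // the first command that runs without throwing wins, and failure is detected
    // purely via the exception; the return code is never inspected.
    private static String detectPythonCommandSketch() {
        for (String candidate : new String[] {"python3", "python"}) {
            try {
                ProcessExecutor.getInstance(ProcessExecutor.Processes.PYTHON_OPENCV)
                        .runCommandWithOutputHandling(Arrays.asList(candidate, "--version"));
                return candidate; // command exists and ran; exit code is not checked
            } catch (Exception e) {
                // command missing or failed -> fall through to the next candidate
            }
        }
        return null; // neither python3 nor python is available
    }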
@Test
void testGetAvailablePythonCommand_WhenNoPythonIsAvailable()
throws IOException, InterruptedException {
// Arrange
when(mockExecutor.runCommandWithOutputHandling(anyList()))
.thenThrow(new IOException("Command not found"));
// Act
String pythonCommand = CheckProgramInstall.getAvailablePythonCommand();
// Assert
assertNull(pythonCommand);
assertFalse(CheckProgramInstall.isPythonAvailable());
// Verify attempts to run both python3 and python
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python3", "--version"));
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python", "--version"));
}
@Test
void testGetAvailablePythonCommand_CachesResult() throws IOException, InterruptedException {
// Arrange
ProcessExecutorResult result = Mockito.mock(ProcessExecutorResult.class);
when(result.getRc()).thenReturn(0);
when(result.getMessages()).thenReturn("Python 3.9.0");
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python3", "--version")))
.thenReturn(result);
// Act
String firstCall = CheckProgramInstall.getAvailablePythonCommand();
// Change the mock to simulate a change in the environment
when(mockExecutor.runCommandWithOutputHandling(anyList()))
.thenThrow(new IOException("Command not found"));
String secondCall = CheckProgramInstall.getAvailablePythonCommand();
// Assert
assertEquals("python3", firstCall);
assertEquals("python3", secondCall); // Second call should return the cached result
// Verify python3 command was only executed once (caching worked)
verify(mockExecutor, times(1))
.runCommandWithOutputHandling(Arrays.asList("python3", "--version"));
}
@Test
void testIsPythonAvailable_DirectCall() throws Exception {
// Arrange
ProcessExecutorResult result = Mockito.mock(ProcessExecutorResult.class);
when(result.getRc()).thenReturn(0);
when(result.getMessages()).thenReturn("Python 3.9.0");
when(mockExecutor.runCommandWithOutputHandling(Arrays.asList("python3", "--version")))
.thenReturn(result);
// Reset again to ensure clean state
resetStaticFields();
// Act - Call isPythonAvailable() directly
boolean pythonAvailable = CheckProgramInstall.isPythonAvailable();
// Assert
assertTrue(pythonAvailable);
// Verify getAvailablePythonCommand was called internally
verify(mockExecutor).runCommandWithOutputHandling(Arrays.asList("python3", "--version"));
}
}

View File

@ -1,352 +0,0 @@
package stirling.software.common.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.stream.Stream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import stirling.software.common.service.SsrfProtectionService;
class CustomHtmlSanitizerTest {
private CustomHtmlSanitizer customHtmlSanitizer;
@BeforeEach
void setUp() {
SsrfProtectionService mockSsrfProtectionService = mock(SsrfProtectionService.class);
stirling.software.common.model.ApplicationProperties mockApplicationProperties = mock(stirling.software.common.model.ApplicationProperties.class);
stirling.software.common.model.ApplicationProperties.System mockSystem = mock(stirling.software.common.model.ApplicationProperties.System.class);
// Allow all URLs by default for basic tests
when(mockSsrfProtectionService.isUrlAllowed(org.mockito.ArgumentMatchers.anyString())).thenReturn(true);
when(mockApplicationProperties.getSystem()).thenReturn(mockSystem);
when(mockSystem.getDisableSanitize()).thenReturn(false); // Enable sanitization for tests
customHtmlSanitizer = new CustomHtmlSanitizer(mockSsrfProtectionService, mockApplicationProperties);
}
@ParameterizedTest
@MethodSource("provideHtmlTestCases")
void testSanitizeHtml(String inputHtml, String[] expectedContainedTags) {
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(inputHtml);
// Assert
for (String tag : expectedContainedTags) {
assertTrue(sanitizedHtml.contains(tag), tag + " should be preserved");
}
}
private static Stream<Arguments> provideHtmlTestCases() {
return Stream.of(
Arguments.of(
"<p>This is <strong>valid</strong> HTML with <em>formatting</em>.</p>",
new String[] {"<p>", "<strong>", "<em>"}),
Arguments.of(
"<p>Text with <b>bold</b>, <i>italic</i>, <u>underline</u>, "
+ "<em>emphasis</em>, <strong>strong</strong>, <strike>strikethrough</strike>, "
+ "<s>strike</s>, <sub>subscript</sub>, <sup>superscript</sup>, "
+ "<tt>teletype</tt>, <code>code</code>, <big>big</big>, <small>small</small>.</p>",
new String[] {
"<b>bold</b>",
"<i>italic</i>",
"<em>emphasis</em>",
"<strong>strong</strong>"
}),
Arguments.of(
"<div>Division</div><h1>Heading 1</h1><h2>Heading 2</h2><h3>Heading 3</h3>"
+ "<h4>Heading 4</h4><h5>Heading 5</h5><h6>Heading 6</h6>"
+ "<blockquote>Blockquote</blockquote><ul><li>List item</li></ul>"
+ "<ol><li>Ordered item</li></ol>",
new String[] {
"<div>", "<h1>", "<h6>", "<blockquote>", "<ul>", "<ol>", "<li>"
}));
}
@Test
void testSanitizeAllowsStyles() {
// Arrange - Testing Sanitizers.STYLES
String htmlWithStyles =
"<p style=\"color: blue; font-size: 16px; margin-top: 10px;\">Styled text</p>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithStyles);
// Assert
// The OWASP HTML Sanitizer might filter some specific styles, so we only check
// that the paragraph tag, a style attribute, and the text content survive
assertTrue(sanitizedHtml.contains("<p"), "Paragraph tag should be preserved");
assertTrue(sanitizedHtml.contains("style="), "Style attribute should be preserved");
assertTrue(sanitizedHtml.contains("Styled text"), "Content should be preserved");
}
@Test
void testSanitizeAllowsLinks() {
// Arrange - Testing Sanitizers.LINKS
String htmlWithLink =
"<a href=\"https://example.com\" title=\"Example Site\">Example Link</a>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithLink);
// Assert
// The most important aspect is that the link content is preserved
assertTrue(sanitizedHtml.contains("Example Link"), "Link text should be preserved");
// Check that the href is present in some form
assertTrue(sanitizedHtml.contains("href="), "Link href attribute should be present");
// Check that the URL is present in some form
assertTrue(sanitizedHtml.contains("example.com"), "Link URL should be preserved");
// The OWASP sanitizer may handle title attributes differently depending on the
// version, so we don't make strict assertions about the title attribute
}
@Test
void testSanitizeDisallowsJavaScriptLinks() {
// Arrange
String htmlWithJsLink = "<a href=\"javascript:alert('XSS')\">Malicious Link</a>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithJsLink);
// Assert
assertFalse(sanitizedHtml.contains("javascript:"), "JavaScript URLs should be removed");
// The link tag might still be there, but the href should be sanitized
assertTrue(sanitizedHtml.contains("Malicious Link"), "Link text should be preserved");
}
@Test
void testSanitizeAllowsTables() {
// Arrange - Testing Sanitizers.TABLES
String htmlWithTable =
"<table border=\"1\">"
+ "<thead><tr><th>Header 1</th><th>Header 2</th></tr></thead>"
+ "<tbody><tr><td>Cell 1</td><td>Cell 2</td></tr></tbody>"
+ "<tfoot><tr><td colspan=\"2\">Footer</td></tr></tfoot>"
+ "</table>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithTable);
// Assert
assertTrue(sanitizedHtml.contains("<table"), "Table should be preserved");
assertTrue(sanitizedHtml.contains("<tr>"), "Table rows should be preserved");
assertTrue(sanitizedHtml.contains("<th>"), "Table headers should be preserved");
assertTrue(sanitizedHtml.contains("<td>"), "Table cells should be preserved");
// Note: border attribute might be removed as it's deprecated in HTML5
// Check for content values instead of exact tag formats because
// the sanitizer may normalize tags and attributes
assertTrue(sanitizedHtml.contains("Header 1"), "Table header content should be preserved");
assertTrue(sanitizedHtml.contains("Cell 1"), "Table cell content should be preserved");
assertTrue(sanitizedHtml.contains("Footer"), "Table footer content should be preserved");
// The OWASP sanitizer may not preserve these structural elements or attributes in the
// same format, so we check for the content rather than the exact structure
}
@Test
void testSanitizeAllowsImages() {
// Arrange - Testing Sanitizers.IMAGES
String htmlWithImage =
"<img src=\"image.jpg\" alt=\"An image\" width=\"100\" height=\"100\">";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithImage);
// Assert
assertTrue(sanitizedHtml.contains("<img"), "Image tag should be preserved");
assertTrue(sanitizedHtml.contains("src=\"image.jpg\""), "Image source should be preserved");
assertTrue(
sanitizedHtml.contains("alt=\"An image\""), "Image alt text should be preserved");
// Width and height might be preserved, but not guaranteed by all sanitizers
}
@Test
void testSanitizeDisallowsDataUrlImages() {
// Arrange
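// The base64 payload below decodes to <svg onload="alert(1)"></svg>, an SVG-based XSS vector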
String htmlWithDataUrlImage =
"<img src=\"data:image/svg+xml;base64,PHN2ZyBvbmxvYWQ9ImFsZXJ0KDEpIj48L3N2Zz4=\" alt=\"SVG with XSS\">";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithDataUrlImage);
// Assert
assertFalse(
sanitizedHtml.contains("data:image/svg"),
"Data URLs with potentially malicious content should be removed");
}
@Test
void testSanitizeRemovesJavaScriptInAttributes() {
// Arrange
String htmlWithJsEvent =
"<a href=\"#\" onclick=\"alert('XSS')\" onmouseover=\"alert('XSS')\">Click me</a>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithJsEvent);
// Assert
assertFalse(
sanitizedHtml.contains("onclick"), "JavaScript event handlers should be removed");
assertFalse(
sanitizedHtml.contains("onmouseover"),
"JavaScript event handlers should be removed");
assertTrue(sanitizedHtml.contains("Click me"), "Link text should be preserved");
}
@Test
void testSanitizeRemovesScriptTags() {
// Arrange
String htmlWithScript = "<p>Safe content</p><script>alert('XSS');</script>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithScript);
// Assert
assertFalse(sanitizedHtml.contains("<script>"), "Script tags should be removed");
assertTrue(
sanitizedHtml.contains("<p>Safe content</p>"), "Safe content should be preserved");
}
@Test
void testSanitizeRemovesNoScriptTags() {
// Arrange - Testing the custom policy to disallow noscript
String htmlWithNoscript = "<p>Safe content</p><noscript>JavaScript is disabled</noscript>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithNoscript);
// Assert
assertFalse(sanitizedHtml.contains("<noscript>"), "Noscript tags should be removed");
assertTrue(
sanitizedHtml.contains("<p>Safe content</p>"), "Safe content should be preserved");
}
@Test
void testSanitizeRemovesIframes() {
// Arrange
String htmlWithIframe = "<p>Safe content</p><iframe src=\"https://example.com\"></iframe>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithIframe);
// Assert
assertFalse(sanitizedHtml.contains("<iframe"), "Iframe tags should be removed");
assertTrue(
sanitizedHtml.contains("<p>Safe content</p>"), "Safe content should be preserved");
}
@Test
void testSanitizeRemovesObjectAndEmbed() {
// Arrange
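// object and embed can load plugin or external active content, so the policy strips them entirely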
String htmlWithObjects =
"<p>Safe content</p>"
+ "<object data=\"data.swf\" type=\"application/x-shockwave-flash\"></object>"
+ "<embed src=\"embed.swf\" type=\"application/x-shockwave-flash\">";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithObjects);
// Assert
assertFalse(sanitizedHtml.contains("<object"), "Object tags should be removed");
assertFalse(sanitizedHtml.contains("<embed"), "Embed tags should be removed");
assertTrue(
sanitizedHtml.contains("<p>Safe content</p>"), "Safe content should be preserved");
}
@Test
void testSanitizeRemovesMetaAndBaseAndLink() {
// Arrange
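// meta refresh can redirect the page, base can rewrite relative URL resolution,
// and link can pull in attacker-controlled stylesheets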
String htmlWithMetaTags =
"<p>Safe content</p>"
+ "<meta http-equiv=\"refresh\" content=\"0; url=http://evil.com\">"
+ "<base href=\"http://evil.com/\">"
+ "<link rel=\"stylesheet\" href=\"evil.css\">";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(htmlWithMetaTags);
// Assert
assertFalse(sanitizedHtml.contains("<meta"), "Meta tags should be removed");
assertFalse(sanitizedHtml.contains("<base"), "Base tags should be removed");
assertFalse(sanitizedHtml.contains("<link"), "Link tags should be removed");
assertTrue(
sanitizedHtml.contains("<p>Safe content</p>"), "Safe content should be preserved");
}
@Test
void testSanitizeHandlesComplexHtml() {
// Arrange
String complexHtml =
"<div class=\"container\">"
+ " <h1 style=\"color: blue;\">Welcome</h1>"
+ " <p>This is a <strong>test</strong> with <a href=\"https://example.com\">link</a>.</p>"
+ " <table>"
+ " <tr><th>Name</th><th>Value</th></tr>"
+ " <tr><td>Item 1</td><td>100</td></tr>"
+ " </table>"
+ " <img src=\"image.jpg\" alt=\"Test image\">"
+ " <script>alert('XSS');</script>"
+ " <iframe src=\"https://evil.com\"></iframe>"
+ "</div>";
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(complexHtml);
// Assert
assertTrue(sanitizedHtml.contains("<div"), "Div should be preserved");
assertTrue(sanitizedHtml.contains("<h1"), "H1 should be preserved");
assertTrue(
sanitizedHtml.contains("<strong>") && sanitizedHtml.contains("test"),
"Strong tag should be preserved");
// Check for content rather than exact formatting
assertTrue(
sanitizedHtml.contains("<a")
&& sanitizedHtml.contains("href=")
&& sanitizedHtml.contains("example.com")
&& sanitizedHtml.contains("link"),
"Link should be preserved");
assertTrue(sanitizedHtml.contains("<table"), "Table should be preserved");
assertTrue(sanitizedHtml.contains("<img"), "Image should be preserved");
assertFalse(sanitizedHtml.contains("<script>"), "Script tag should be removed");
assertFalse(sanitizedHtml.contains("<iframe"), "Iframe tag should be removed");
// Content checks
assertTrue(sanitizedHtml.contains("Welcome"), "Heading content should be preserved");
assertTrue(sanitizedHtml.contains("Name"), "Table header content should be preserved");
assertTrue(sanitizedHtml.contains("Item 1"), "Table data content should be preserved");
}
@Test
void testSanitizeHandlesEmpty() {
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize("");
// Assert
assertEquals("", sanitizedHtml, "Empty input should result in empty string");
}
@Test
void testSanitizeHandlesNull() {
// Act
String sanitizedHtml = customHtmlSanitizer.sanitize(null);
// Assert
assertEquals("", sanitizedHtml, "Null input should result in empty string");
}
}