1
0

V1.1 Änderungen

This commit is contained in:
2026-04-09 05:42:02 +02:00
parent 39800b6ea8
commit 5099ff4aca
44 changed files with 4912 additions and 957 deletions

View File

@@ -0,0 +1,59 @@
package de.gecheckt.pdf.umbenenner.application.config.provider;
import java.util.Arrays;
import java.util.Optional;
/**
 * The AI provider API families supported by the PDF renaming process.
 * <p>
 * Every constant stands for one distinct API protocol family. Per application run,
 * exactly one family is active; the choice is made through the
 * {@code ai.provider.active} configuration property.
 * <p>
 * {@link #getIdentifier()} yields the string that the {@code ai.provider.active}
 * property must contain to select the corresponding family, and
 * {@link #fromIdentifier(String)} performs the reverse lookup from a configuration
 * string to the enum constant.
 */
public enum AiProviderFamily {

    /** OpenAI-compatible Chat Completions API usable with OpenAI itself and compatible third-party endpoints. */
    OPENAI_COMPATIBLE("openai-compatible"),

    /** Native Anthropic Messages API for Claude models. */
    CLAUDE("claude");

    /** Configuration string that selects this family via {@code ai.provider.active}. */
    private final String identifier;

    AiProviderFamily(String identifier) {
        this.identifier = identifier;
    }

    /**
     * Returns the configuration identifier string of this provider family.
     * <p>
     * The returned value is one of the valid settings for the
     * {@code ai.provider.active} property.
     *
     * @return the configuration identifier, never {@code null}
     */
    public String getIdentifier() {
        return identifier;
    }

    /**
     * Looks up the provider family that matches the given configuration identifier.
     * <p>
     * Matching is case-sensitive against the exact identifier strings of the
     * constants (e.g., {@code "openai-compatible"}, {@code "claude"}).
     *
     * @param identifier the value of the {@code ai.provider.active} property;
     *                   {@code null} yields an empty Optional
     * @return the matching provider family, or {@link Optional#empty()} if not recognized
     */
    public static Optional<AiProviderFamily> fromIdentifier(String identifier) {
        // String.equals(null) is false, so a null argument simply falls through
        // to the empty result without a dedicated null check.
        for (AiProviderFamily family : values()) {
            if (family.identifier.equals(identifier)) {
                return Optional.of(family);
            }
        }
        return Optional.empty();
    }
}

View File

@@ -0,0 +1,43 @@
package de.gecheckt.pdf.umbenenner.application.config.provider;
/**
 * Immutable configuration model covering all supported AI provider families.
 * <p>
 * Bundles the resolved {@link ProviderConfiguration} of every supported family
 * together with the selection of the single family that is active for this
 * application run.
 *
 * <h2>Invariants</h2>
 * <ul>
 * <li>Exactly one provider family is active per run.</li>
 * <li>Required fields are enforced only for the active provider; the inactive
 *     provider's configuration may be incomplete.</li>
 * <li>These invariants are checked by the corresponding validator in the
 *     adapter layer, not by this record itself.</li>
 * </ul>
 *
 * @param activeProviderFamily the provider family selected for this run; {@code null}
 *                             indicates that {@code ai.provider.active} was absent or
 *                             held an unrecognised value, which the validator will reject
 * @param openAiCompatibleConfig configuration for the OpenAI-compatible provider family
 * @param claudeConfig configuration for the Anthropic Claude provider family
 */
public record MultiProviderConfiguration(
        AiProviderFamily activeProviderFamily,
        ProviderConfiguration openAiCompatibleConfig,
        ProviderConfiguration claudeConfig) {

    /**
     * Returns the {@link ProviderConfiguration} belonging to the active provider family.
     *
     * @return the active provider's configuration; never {@code null} as long as
     *         {@link #activeProviderFamily()} is not {@code null}
     * @throws NullPointerException if {@code activeProviderFamily} is {@code null}
     *                              (a {@code null} selector on a switch expression)
     */
    public ProviderConfiguration activeProviderConfiguration() {
        // Exhaustive switch over the enum: no default branch needed, and the
        // compiler flags any newly added family that is not handled here.
        return switch (activeProviderFamily) {
            case CLAUDE -> {
                yield claudeConfig;
            }
            case OPENAI_COMPATIBLE -> {
                yield openAiCompatibleConfig;
            }
        };
    }
}

View File

@@ -0,0 +1,34 @@
package de.gecheckt.pdf.umbenenner.application.config.provider;
/**
 * Immutable configuration for a single AI provider family.
 * <p>
 * Holds all parameters needed to connect to and authenticate with one AI provider endpoint.
 * Instances are created by the configuration parser in the adapter layer; validation
 * of required fields is performed by the corresponding validator — this record itself
 * accepts any combination of values, including an entirely unconfigured one.
 *
 * <h2>Field semantics</h2>
 * <ul>
 * <li>{@code model} &mdash; the AI model name; required for the active provider, may be {@code null}
 * for the inactive provider.</li>
 * <li>{@code timeoutSeconds} &mdash; HTTP connection/read timeout in seconds; must be positive for
 * the active provider. {@code 0} indicates the value was not configured.</li>
 * <li>{@code baseUrl} &mdash; the base URL of the API endpoint. For the Anthropic Claude family a
 * default of {@code https://api.anthropic.com} is applied by the parser when the property
 * is absent; for the OpenAI-compatible family it is required and may not be {@code null}.</li>
 * <li>{@code apiKey} &mdash; the resolved API key after environment-variable precedence has been
 * applied; may be blank for the inactive provider, must not be blank for the active provider.</li>
 * </ul>
 *
 * @param model the AI model name; {@code null} when not configured
 * @param timeoutSeconds HTTP timeout in seconds; {@code 0} when not configured
 * @param baseUrl the base URL of the API endpoint; {@code null} when not configured
 *                (only applicable to providers without a built-in default)
 * @param apiKey the resolved API key; blank when not configured
 */
public record ProviderConfiguration(
    String model,
    int timeoutSeconds,
    String baseUrl,
    String apiKey) {
}

View File

@@ -1,16 +1,24 @@
package de.gecheckt.pdf.umbenenner.application.config.startup;
import java.net.URI;
import java.nio.file.Path;
import de.gecheckt.pdf.umbenenner.application.config.provider.MultiProviderConfiguration;
/**
* Typed immutable configuration model for PDF Umbenenner startup parameters.
* <p>
* Contains all technical infrastructure and runtime configuration parameters
* loaded and validated at bootstrap time. This is a complete configuration model
* for the entire application startup, including paths, API settings, persistence,
* for the entire application startup, including paths, AI provider selection, persistence,
* and operational parameters.
*
* <h2>AI provider configuration</h2>
* <p>
* The {@link MultiProviderConfiguration} encapsulates the active provider selection
* together with the per-provider connection parameters for all supported provider families.
* Exactly one provider family is active per run; the selection is driven by the
* {@code ai.provider.active} configuration property.
*
* <h2>AI content sensitivity ({@code log.ai.sensitive})</h2>
* <p>
* The boolean property {@code log.ai.sensitive} controls whether sensitive AI-generated
@@ -25,9 +33,7 @@ public record StartConfiguration(
Path sourceFolder,
Path targetFolder,
Path sqliteFile,
URI apiBaseUrl,
String apiModel,
int apiTimeoutSeconds,
MultiProviderConfiguration multiProviderConfiguration,
int maxRetriesTransient,
int maxPages,
int maxTextCharacters,
@@ -35,7 +41,6 @@ public record StartConfiguration(
Path runtimeLockFile,
Path logDirectory,
String logLevel,
String apiKey,
/**
* Whether sensitive AI content (raw response, reasoning) may be written to log files.

View File

@@ -42,6 +42,10 @@ import java.util.Objects;
* successful or skip attempts.</li>
* <li>{@link #retryable()} — {@code true} if the failure is considered retryable in a
* later run; {@code false} for final failures, successes, and skip attempts.</li>
* <li>{@link #aiProvider()} — opaque identifier of the AI provider that was active
* during this attempt (e.g. {@code "openai-compatible"} or {@code "claude"});
* {@code null} for attempts that did not involve an AI call (skip, pre-check
* failure) or for historical attempts recorded before this field was introduced.</li>
* <li>{@link #modelName()} — the AI model name used in this attempt; {@code null} if
* no AI call was made (e.g. pre-check failures or skip attempts).</li>
* <li>{@link #promptIdentifier()} — stable identifier of the prompt template used;
@@ -74,6 +78,7 @@ import java.util.Objects;
* @param failureClass failure classification, or {@code null} for non-failure statuses
* @param failureMessage failure description, or {@code null} for non-failure statuses
* @param retryable whether this failure should be retried in a later run
* @param aiProvider opaque AI provider identifier for this attempt, or {@code null}
* @param modelName AI model name, or {@code null} if no AI call was made
* @param promptIdentifier prompt identifier, or {@code null} if no AI call was made
* @param processedPageCount number of PDF pages processed, or {@code null}
@@ -97,6 +102,7 @@ public record ProcessingAttempt(
String failureMessage,
boolean retryable,
// AI traceability fields (null for non-AI attempts)
String aiProvider,
String modelName,
String promptIdentifier,
Integer processedPageCount,
@@ -131,7 +137,8 @@ public record ProcessingAttempt(
* Creates a {@link ProcessingAttempt} with no AI traceability fields set.
* <p>
* Convenience factory for pre-check failures, skip events, and any attempt
* that does not involve an AI call.
* that does not involve an AI call. The {@link #aiProvider()} field is set
* to {@code null}.
*
* @param fingerprint document identity; must not be null
* @param runId batch run identifier; must not be null
@@ -157,6 +164,6 @@ public record ProcessingAttempt(
return new ProcessingAttempt(
fingerprint, runId, attemptNumber, startedAt, endedAt,
status, failureClass, failureMessage, retryable,
null, null, null, null, null, null, null, null, null, null);
null, null, null, null, null, null, null, null, null, null, null);
}
}

View File

@@ -154,15 +154,22 @@ public class DocumentProcessingCoordinator {
private final TargetFileCopyPort targetFileCopyPort;
private final ProcessingLogger logger;
private final int maxRetriesTransient;
private final String activeProviderIdentifier;
/**
* Creates the document processing coordinator with all required ports, logger, and
* the transient retry limit.
* Creates the document processing coordinator with all required ports, logger,
* the transient retry limit, and the active AI provider identifier.
* <p>
* {@code maxRetriesTransient} is the maximum number of historised transient error attempts
* per fingerprint before the document is finalised to
* {@link ProcessingStatus#FAILED_FINAL}. The attempt that causes the counter to
* reach this value finalises the document. Must be &gt;= 1.
* <p>
* {@code activeProviderIdentifier} is the opaque string identifier of the AI provider
* that is active for this run (e.g. {@code "openai-compatible"} or {@code "claude"}).
* It is written to the attempt history for every attempt that involves an AI call,
* enabling provider-level traceability per attempt without introducing
* provider-specific logic in the application layer.
*
* @param documentRecordRepository port for reading and writing the document master record;
* must not be null
@@ -176,8 +183,11 @@ public class DocumentProcessingCoordinator {
* @param logger for processing-related logging; must not be null
* @param maxRetriesTransient maximum number of historised transient error attempts
* before finalisation; must be &gt;= 1
* @param activeProviderIdentifier opaque identifier of the active AI provider for this run;
* must not be null or blank
* @throws NullPointerException if any object parameter is null
* @throws IllegalArgumentException if {@code maxRetriesTransient} is less than 1
* @throws IllegalArgumentException if {@code maxRetriesTransient} is less than 1, or
* if {@code activeProviderIdentifier} is blank
*/
public DocumentProcessingCoordinator(
DocumentRecordRepository documentRecordRepository,
@@ -186,11 +196,16 @@ public class DocumentProcessingCoordinator {
TargetFolderPort targetFolderPort,
TargetFileCopyPort targetFileCopyPort,
ProcessingLogger logger,
int maxRetriesTransient) {
int maxRetriesTransient,
String activeProviderIdentifier) {
if (maxRetriesTransient < 1) {
throw new IllegalArgumentException(
"maxRetriesTransient must be >= 1, got: " + maxRetriesTransient);
}
Objects.requireNonNull(activeProviderIdentifier, "activeProviderIdentifier must not be null");
if (activeProviderIdentifier.isBlank()) {
throw new IllegalArgumentException("activeProviderIdentifier must not be blank");
}
this.documentRecordRepository =
Objects.requireNonNull(documentRecordRepository, "documentRecordRepository must not be null");
this.processingAttemptRepository =
@@ -203,6 +218,7 @@ public class DocumentProcessingCoordinator {
Objects.requireNonNull(targetFileCopyPort, "targetFileCopyPort must not be null");
this.logger = Objects.requireNonNull(logger, "logger must not be null");
this.maxRetriesTransient = maxRetriesTransient;
this.activeProviderIdentifier = activeProviderIdentifier;
}
/**
@@ -503,7 +519,7 @@ public class DocumentProcessingCoordinator {
ProcessingAttempt successAttempt = new ProcessingAttempt(
fingerprint, context.runId(), attemptNumber, attemptStart, now,
ProcessingStatus.SUCCESS, null, null, false,
null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null,
resolvedFilename);
DocumentRecord successRecord = buildSuccessRecord(
@@ -951,6 +967,7 @@ public class DocumentProcessingCoordinator {
yield new ProcessingAttempt(
fingerprint, context.runId(), attemptNumber, startedAt, endedAt,
outcome.overallStatus(), failureClass, failureMessage, outcome.retryable(),
activeProviderIdentifier,
ctx.modelName(), ctx.promptIdentifier(),
ctx.processedPageCount(), ctx.sentCharacterCount(),
ctx.aiRawResponse(),
@@ -964,6 +981,7 @@ public class DocumentProcessingCoordinator {
yield new ProcessingAttempt(
fingerprint, context.runId(), attemptNumber, startedAt, endedAt,
outcome.overallStatus(), failureClass, failureMessage, outcome.retryable(),
activeProviderIdentifier,
ctx.modelName(), ctx.promptIdentifier(),
ctx.processedPageCount(), ctx.sentCharacterCount(),
ctx.aiRawResponse(),
@@ -976,6 +994,7 @@ public class DocumentProcessingCoordinator {
yield new ProcessingAttempt(
fingerprint, context.runId(), attemptNumber, startedAt, endedAt,
outcome.overallStatus(), failureClass, failureMessage, outcome.retryable(),
activeProviderIdentifier,
ctx.modelName(), ctx.promptIdentifier(),
ctx.processedPageCount(), ctx.sentCharacterCount(),
ctx.aiRawResponse(),

View File

@@ -90,7 +90,7 @@ class DocumentProcessingCoordinatorTest {
unitOfWorkPort = new CapturingUnitOfWorkPort(recordRepo, attemptRepo);
processor = new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
candidate = new SourceDocumentCandidate(
"test.pdf", 1024L, new SourceDocumentLocator("/tmp/test.pdf"));
@@ -250,7 +250,8 @@ class DocumentProcessingCoordinatorTest {
// With maxRetriesTransient=1, the very first transient error finalises the document
DocumentProcessingCoordinator coordinatorWith1Retry = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 1);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 1,
"openai-compatible");
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome outcome = new TechnicalDocumentError(
@@ -668,7 +669,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new PersistenceLookupTechnicalFailure("Datenbank nicht erreichbar", null));
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
@@ -686,7 +687,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -705,7 +706,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
DocumentRecord existingRecord = buildRecord(ProcessingStatus.FAILED_FINAL, new FailureCounters(2, 0));
recordRepo.setLookupResult(new DocumentTerminalFinalFailure(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckFailed(
@@ -724,7 +725,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
@@ -742,7 +743,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new DocumentUnknown());
unitOfWorkPort.failOnExecute = true;
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -761,7 +762,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -780,7 +781,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
unitOfWorkPort.failOnExecute = true;
@@ -848,6 +849,7 @@ class DocumentProcessingCoordinatorTest {
ProcessingAttempt badProposal = new ProcessingAttempt(
fingerprint, context.runId(), 1, Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY, null, null, false,
null,
"model", "prompt", 1, 100, "{}", "reason",
null, DateSource.AI_PROVIDED, "Rechnung", null);
attemptRepo.savedAttempts.add(badProposal);
@@ -871,7 +873,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithFailingFolder = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new FailingTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
boolean result = coordinatorWithFailingFolder.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> null);
@@ -893,7 +895,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithFailingCopy = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new FailingTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
boolean result = coordinatorWithFailingCopy.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> null);
@@ -915,6 +917,7 @@ class DocumentProcessingCoordinatorTest {
ProcessingAttempt badProposal = new ProcessingAttempt(
fingerprint, context.runId(), 1, Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY, null, null, false,
null,
"model", "prompt", 1, 100, "{}", "reason",
LocalDate.of(2026, 1, 15), DateSource.AI_PROVIDED,
"A".repeat(21), null);
@@ -941,6 +944,7 @@ class DocumentProcessingCoordinatorTest {
ProcessingAttempt badProposal = new ProcessingAttempt(
fingerprint, context.runId(), 1, Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY, null, null, false,
null,
"model", "prompt", 1, 100, "{}", "reason",
LocalDate.of(2026, 1, 15), DateSource.AI_PROVIDED,
"Rechnung-2026", null);
@@ -980,7 +984,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCountingCopy = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), countingCopyPort, new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
boolean result = coordinatorWithCountingCopy.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> {
@@ -1014,7 +1018,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCountingCopy = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), countingCopyPort, new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
boolean result = coordinatorWithCountingCopy.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> null);
@@ -1044,7 +1048,8 @@ class DocumentProcessingCoordinatorTest {
CountingTargetFileCopyPort failingCopy = new CountingTargetFileCopyPort(2); // fail both
DocumentProcessingCoordinator coordinatorWith1Retry = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), failingCopy, new NoOpProcessingLogger(), 1);
new NoOpTargetFolderPort(), failingCopy, new NoOpProcessingLogger(), 1,
"openai-compatible");
boolean result = coordinatorWith1Retry.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> null);
@@ -1079,7 +1084,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new FailingTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1105,7 +1110,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new FailingTargetFileCopyPort(), capturingLogger,
1 /* maxRetriesTransient=1 → immediately final */);
1 /* maxRetriesTransient=1 → immediately final */, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1128,7 +1133,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCountingCopy = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), countingCopyPort, new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCountingCopy.processDeferredOutcome(
candidate, fingerprint, context, attemptStart,
@@ -1197,7 +1202,8 @@ class DocumentProcessingCoordinatorTest {
// maxRetriesTransient=2: first transient error → FAILED_RETRYABLE, second → FAILED_FINAL
DocumentProcessingCoordinator coordinatorWith2Retries = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 2);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 2,
"openai-compatible");
DocumentProcessingOutcome transientError = new TechnicalDocumentError(candidate, "Timeout", null);
// Run 1: new document, first transient error → FAILED_RETRYABLE, transientErrorCount=1
@@ -1233,6 +1239,7 @@ class DocumentProcessingCoordinatorTest {
return new ProcessingAttempt(
fingerprint, context.runId(), 1, Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY, null, null, false,
"openai-compatible",
"gpt-4", "prompt-v1.txt", 1, 500, "{}", "reason",
LocalDate.of(2026, 1, 15), DateSource.AI_PROVIDED, "Rechnung", null);
}
@@ -1495,7 +1502,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new DocumentTerminalSuccess(
buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero())));
@@ -1516,7 +1523,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new DocumentTerminalFinalFailure(
buildRecord(ProcessingStatus.FAILED_FINAL, new FailureCounters(2, 0))));
@@ -1537,7 +1544,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
recordRepo.setLookupResult(new DocumentUnknown());
coordinatorWithCapturing.process(candidate, fingerprint,
@@ -1560,7 +1567,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
// Existing record already has one content error — second content error finalises
recordRepo.setLookupResult(new DocumentKnownProcessable(
buildRecord(ProcessingStatus.FAILED_RETRYABLE, new FailureCounters(1, 0))));
@@ -1596,7 +1603,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1612,6 +1619,7 @@ class DocumentProcessingCoordinatorTest {
ProcessingAttempt badProposal = new ProcessingAttempt(
fingerprint, context.runId(), 1, Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY, null, null, false,
null,
"model", "prompt", 1, 100, "{}", "reason",
null, DateSource.AI_PROVIDED, "Rechnung", null);
attemptRepo.savedAttempts.add(badProposal);
@@ -1620,7 +1628,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1639,7 +1647,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new FailingTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1658,7 +1666,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(
candidate, fingerprint, context, attemptStart,
@@ -1680,7 +1688,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), onlyFirstFails, capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(
candidate, fingerprint, context, attemptStart,
@@ -1702,7 +1710,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), bothFail, capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(
candidate, fingerprint, context, attemptStart, c -> null);
@@ -1723,7 +1731,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), onlyFirstFails, capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(
candidate, fingerprint, context, attemptStart,
@@ -1843,7 +1851,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(
candidate, fingerprint, context, attemptStart,
@@ -1873,7 +1881,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
capturingFolderPort, new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -1897,7 +1905,7 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithCapturing = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DEFAULT_MAX_RETRIES_TRANSIENT, "openai-compatible");
coordinatorWithCapturing.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);

View File

@@ -356,6 +356,7 @@ class TargetFilenameBuildingServiceTest {
Instant.now(), Instant.now(),
ProcessingStatus.PROPOSAL_READY,
null, null, false,
"openai-compatible",
"gpt-4", "prompt-v1.txt", 1, 100,
"{}", "reasoning text",
date, DateSource.AI_PROVIDED, title,

View File

@@ -469,7 +469,7 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator failingProcessor = new DocumentProcessingCoordinator(
new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(),
new NoOpUnitOfWorkPort(), new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(),
new NoOpProcessingLogger(), 3) {
new NoOpProcessingLogger(), 3, "openai-compatible") {
@Override
public boolean processDeferredOutcome(
de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate candidate,
@@ -517,7 +517,7 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator selectiveFailingProcessor = new DocumentProcessingCoordinator(
new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(),
new NoOpUnitOfWorkPort(), new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(),
new NoOpProcessingLogger(), 3) {
new NoOpProcessingLogger(), 3, "openai-compatible") {
private int callCount = 0;
@Override
@@ -760,7 +760,8 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator realCoordinator = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWork,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3,
"openai-compatible");
// Fingerprint port returns the pre-defined fingerprint for this candidate
FingerprintPort fixedFingerprintPort = c -> new FingerprintSuccess(fingerprint);
@@ -807,7 +808,8 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator realCoordinator = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWork,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3,
"openai-compatible");
FingerprintPort fixedFingerprintPort = c -> new FingerprintSuccess(fingerprint);
@@ -860,7 +862,8 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator realCoordinator = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWork,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3,
"openai-compatible");
FingerprintPort perCandidateFingerprintPort = candidate -> {
if (candidate.uniqueIdentifier().equals("terminal.pdf")) return new FingerprintSuccess(terminalFp);
@@ -1152,7 +1155,8 @@ class BatchRunProcessingUseCaseTest {
private static class NoOpDocumentProcessingCoordinator extends DocumentProcessingCoordinator {
NoOpDocumentProcessingCoordinator() {
super(new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(), new NoOpUnitOfWorkPort(),
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3,
"openai-compatible");
}
}
@@ -1164,7 +1168,8 @@ class BatchRunProcessingUseCaseTest {
TrackingDocumentProcessingCoordinator() {
super(new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(), new NoOpUnitOfWorkPort(),
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3,
"openai-compatible");
}
@Override