1
0

Umsetzung von Meilenstein M7

This commit is contained in:
2026-04-07 17:26:02 +02:00
parent ffd91c766d
commit e9e9b2d17a
30 changed files with 2328 additions and 206 deletions

View File

@@ -0,0 +1,189 @@
package de.gecheckt.pdf.umbenenner.application.port.out;
import de.gecheckt.pdf.umbenenner.domain.model.DocumentFingerprint;
import de.gecheckt.pdf.umbenenner.domain.model.RunId;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* Tests for the {@link DocumentLogCorrelation} sealed type and its two permitted implementations.
* <p>
* Verifies:
* <ul>
* <li>{@link DocumentLogCorrelation.CandidateCorrelation} stores the run identifier and
* candidate description correctly (pre-fingerprint phase).</li>
* <li>{@link DocumentLogCorrelation.FingerprintCorrelation} stores the run identifier and
* fingerprint correctly (post-fingerprint phase).</li>
* <li>The sealed type contract: only the two permitted subtypes exist.</li>
* </ul>
*/
class DocumentLogCorrelationTest {

    private static final String RUN_ID_VALUE = "run-correlation-test-001";
    private static final String CANDIDATE_DESCRIPTION = "invoice-2026-01-15.pdf";
    private static final String FINGERPRINT_HEX = "a".repeat(64);

    // -------------------------------------------------------------------------
    // Factory helpers — remove the constructor-call duplication in the tests
    // -------------------------------------------------------------------------

    /** Creates a fresh {@link RunId} carrying the shared test value. */
    private static RunId newRunId() {
        return new RunId(RUN_ID_VALUE);
    }

    /** Creates a candidate-phase correlation with the shared candidate description. */
    private static DocumentLogCorrelation.CandidateCorrelation candidateCorrelation(RunId runId) {
        return new DocumentLogCorrelation.CandidateCorrelation(runId, CANDIDATE_DESCRIPTION);
    }

    /** Creates a fingerprint-phase correlation for the given fingerprint. */
    private static DocumentLogCorrelation.FingerprintCorrelation fingerprintCorrelation(
            RunId runId, DocumentFingerprint fingerprint) {
        return new DocumentLogCorrelation.FingerprintCorrelation(runId, fingerprint);
    }

    // -------------------------------------------------------------------------
    // CandidateCorrelation pre-fingerprint phase
    // -------------------------------------------------------------------------

    @Test
    void candidateCorrelation_storesRunId() {
        RunId runId = newRunId();
        assertEquals(runId, candidateCorrelation(runId).runId());
    }

    @Test
    void candidateCorrelation_storesCandidateDescription() {
        assertEquals(CANDIDATE_DESCRIPTION,
                candidateCorrelation(newRunId()).candidateDescription());
    }

    @Test
    void candidateCorrelation_runIdAccessibleViaInterface() {
        RunId runId = newRunId();
        // runId() is declared on the sealed interface and must be accessible polymorphically
        DocumentLogCorrelation correlation = candidateCorrelation(runId);
        assertEquals(runId, correlation.runId());
    }

    @Test
    void candidateCorrelation_twoInstancesWithSameDataAreEqual() {
        RunId runId = newRunId();
        // Record semantics: component-wise equality, not identity
        assertEquals(candidateCorrelation(runId), candidateCorrelation(runId));
    }

    @Test
    void candidateCorrelation_implementsDocumentLogCorrelation() {
        assertInstanceOf(DocumentLogCorrelation.class, candidateCorrelation(newRunId()));
    }

    // -------------------------------------------------------------------------
    // FingerprintCorrelation post-fingerprint phase
    // -------------------------------------------------------------------------

    @Test
    void fingerprintCorrelation_storesRunId() {
        RunId runId = newRunId();
        DocumentFingerprint fingerprint = new DocumentFingerprint(FINGERPRINT_HEX);
        assertEquals(runId, fingerprintCorrelation(runId, fingerprint).runId());
    }

    @Test
    void fingerprintCorrelation_storesFingerprint() {
        DocumentFingerprint fingerprint = new DocumentFingerprint(FINGERPRINT_HEX);
        assertEquals(fingerprint, fingerprintCorrelation(newRunId(), fingerprint).fingerprint());
    }

    @Test
    void fingerprintCorrelation_runIdAccessibleViaInterface() {
        RunId runId = newRunId();
        // runId() is declared on the sealed interface and must be accessible polymorphically
        DocumentLogCorrelation correlation =
                fingerprintCorrelation(runId, new DocumentFingerprint(FINGERPRINT_HEX));
        assertEquals(runId, correlation.runId());
    }

    @Test
    void fingerprintCorrelation_twoInstancesWithSameDataAreEqual() {
        RunId runId = newRunId();
        DocumentFingerprint fingerprint = new DocumentFingerprint(FINGERPRINT_HEX);
        assertEquals(fingerprintCorrelation(runId, fingerprint),
                fingerprintCorrelation(runId, fingerprint));
    }

    @Test
    void fingerprintCorrelation_implementsDocumentLogCorrelation() {
        assertInstanceOf(DocumentLogCorrelation.class,
                fingerprintCorrelation(newRunId(), new DocumentFingerprint(FINGERPRINT_HEX)));
    }

    // -------------------------------------------------------------------------
    // Sealed type structural contract
    // -------------------------------------------------------------------------

    @Test
    void sealedType_patternMatchExhaustsAllPermittedSubtypes() {
        RunId runId = newRunId();
        DocumentLogCorrelation candidatePhase = candidateCorrelation(runId);
        DocumentLogCorrelation fingerprintPhase =
                fingerprintCorrelation(runId, new DocumentFingerprint(FINGERPRINT_HEX));
        // Pattern match on the sealed type must compile exhaustively for exactly these two cases
        assertEquals("candidate", describe(candidatePhase));
        assertEquals("fingerprint", describe(fingerprintPhase));
    }

    /** Helper method using an exhaustive switch over the sealed type (no default branch). */
    private static String describe(DocumentLogCorrelation correlation) {
        return switch (correlation) {
            case DocumentLogCorrelation.CandidateCorrelation ignored -> "candidate";
            case DocumentLogCorrelation.FingerprintCorrelation ignored -> "fingerprint";
        };
    }

    @Test
    void candidateCorrelation_differentDescriptions_areNotEqual() {
        RunId runId = newRunId();
        DocumentLogCorrelation.CandidateCorrelation withFirst =
                new DocumentLogCorrelation.CandidateCorrelation(runId, "first.pdf");
        DocumentLogCorrelation.CandidateCorrelation withSecond =
                new DocumentLogCorrelation.CandidateCorrelation(runId, "second.pdf");
        assertNotEquals(withFirst, withSecond);
    }

    @Test
    void fingerprintCorrelation_differentFingerprints_areNotEqual() {
        RunId runId = newRunId();
        // Reuse the shared constant for the first value; only the second must differ
        DocumentFingerprint first = new DocumentFingerprint(FINGERPRINT_HEX);
        DocumentFingerprint second = new DocumentFingerprint("b".repeat(64));
        assertNotEquals(fingerprintCorrelation(runId, first),
                fingerprintCorrelation(runId, second));
    }
}

View File

@@ -0,0 +1,320 @@
package de.gecheckt.pdf.umbenenner.application.service;
import de.gecheckt.pdf.umbenenner.application.port.out.DocumentErrorClassification;
import de.gecheckt.pdf.umbenenner.application.port.out.FailureCounters;
import de.gecheckt.pdf.umbenenner.application.port.out.ImmediateRetryDecision;
import de.gecheckt.pdf.umbenenner.application.port.out.RetryDecision;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* Tests for {@link DefaultRetryDecisionEvaluator}.
* <p>
* Verifies the binding retry policy rules for deterministic content errors,
* transient technical errors, target copy failures, and the within-run
* immediate retry mechanism.
*/
class DefaultRetryDecisionEvaluatorTest {

    private static final String FAILURE_CLASS = "SOME_FAILURE";
    private static final String FAILURE_MESSAGE = "Something went wrong";

    private DefaultRetryDecisionEvaluator evaluator;

    @BeforeEach
    void setUp() {
        evaluator = new DefaultRetryDecisionEvaluator();
    }

    // -------------------------------------------------------------------------
    // Invocation helpers — the same 4-line evaluate(...) call was repeated in
    // nearly every test; centralised here with the shared class/message.
    // -------------------------------------------------------------------------

    /** Evaluates a deterministic content error (retry limit fixed at 1). */
    private RetryDecision evaluateContentError(FailureCounters counters) {
        return evaluator.evaluate(
                DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                counters, 1, FAILURE_CLASS, FAILURE_MESSAGE);
    }

    /** Evaluates a transient technical error with the given retry limit. */
    private RetryDecision evaluateTransientError(FailureCounters counters, int maxRetriesTransient) {
        return evaluator.evaluate(
                DocumentErrorClassification.TRANSIENT_TECHNICAL_ERROR,
                counters, maxRetriesTransient, FAILURE_CLASS, FAILURE_MESSAGE);
    }

    // -------------------------------------------------------------------------
    // Deterministic content error rules
    // -------------------------------------------------------------------------

    @Test
    void evaluate_firstContentError_returnsContentErrorRetryable() {
        RetryDecision decision = evaluateContentError(new FailureCounters(0, 0));
        assertInstanceOf(RetryDecision.ContentErrorRetryable.class, decision);
        RetryDecision.ContentErrorRetryable retryable = (RetryDecision.ContentErrorRetryable) decision;
        assertEquals(FAILURE_CLASS, retryable.failureClass());
        assertEquals(FAILURE_MESSAGE, retryable.failureMessage());
    }

    @Test
    void evaluate_secondContentError_returnsContentErrorFinal() {
        RetryDecision decision = evaluateContentError(new FailureCounters(1, 0));
        assertInstanceOf(RetryDecision.ContentErrorFinal.class, decision);
        RetryDecision.ContentErrorFinal finalDecision = (RetryDecision.ContentErrorFinal) decision;
        assertEquals(FAILURE_CLASS, finalDecision.failureClass());
        assertEquals(FAILURE_MESSAGE, finalDecision.failureMessage());
    }

    @Test
    void evaluate_subsequentContentErrors_alwaysReturnContentErrorFinal() {
        // Any count >= 1 results in final (covers legacy M4-M6 data with higher counts)
        for (int count = 1; count <= 5; count++) {
            RetryDecision decision = evaluateContentError(new FailureCounters(count, 0));
            assertInstanceOf(RetryDecision.ContentErrorFinal.class, decision,
                    "Expected ContentErrorFinal for contentErrorCount=" + count);
        }
    }

    @Test
    void evaluate_contentError_transientCounterIsIrrelevant() {
        // Non-zero transient counter must not affect content error decision
        RetryDecision decision = evaluateContentError(new FailureCounters(0, 5));
        assertInstanceOf(RetryDecision.ContentErrorRetryable.class, decision);
    }

    // -------------------------------------------------------------------------
    // Transient technical error rules
    // -------------------------------------------------------------------------

    @Test
    void evaluate_transientError_maxRetriesTransientOne_firstError_returnsTransientErrorFinal() {
        // maxRetriesTransient=1: counter before=0, after=1=limit → final immediately
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 0), 1);
        assertInstanceOf(RetryDecision.TransientErrorFinal.class, decision,
                "With maxRetriesTransient=1, first transient error must be final");
        RetryDecision.TransientErrorFinal finalDecision = (RetryDecision.TransientErrorFinal) decision;
        assertEquals(FAILURE_CLASS, finalDecision.failureClass());
        assertEquals(FAILURE_MESSAGE, finalDecision.failureMessage());
    }

    @Test
    void evaluate_transientError_maxRetriesTransientTwo_firstError_returnsTransientErrorRetryable() {
        // maxRetriesTransient=2: counter before=0, after=1 < 2 → retryable
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 0), 2);
        assertInstanceOf(RetryDecision.TransientErrorRetryable.class, decision);
        RetryDecision.TransientErrorRetryable retryable = (RetryDecision.TransientErrorRetryable) decision;
        assertEquals(FAILURE_CLASS, retryable.failureClass());
        assertEquals(FAILURE_MESSAGE, retryable.failureMessage());
    }

    @Test
    void evaluate_transientError_maxRetriesTransientTwo_secondError_returnsTransientErrorFinal() {
        // maxRetriesTransient=2: counter before=1, after=2=limit → final
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 1), 2);
        assertInstanceOf(RetryDecision.TransientErrorFinal.class, decision,
                "With maxRetriesTransient=2, second transient error must be final");
    }

    @Test
    void evaluate_transientError_maxRetriesTransientThree_firstError_returnsRetryable() {
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 0), 3);
        assertInstanceOf(RetryDecision.TransientErrorRetryable.class, decision);
    }

    @Test
    void evaluate_transientError_maxRetriesTransientThree_secondError_returnsRetryable() {
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 1), 3);
        assertInstanceOf(RetryDecision.TransientErrorRetryable.class, decision);
    }

    @Test
    void evaluate_transientError_maxRetriesTransientThree_thirdError_returnsFinal() {
        // counter before=2, after=3=limit → final
        RetryDecision decision = evaluateTransientError(new FailureCounters(0, 2), 3);
        assertInstanceOf(RetryDecision.TransientErrorFinal.class, decision,
                "Third transient error with maxRetriesTransient=3 must be final");
    }

    @Test
    void evaluate_transientError_contentCounterIsIrrelevant() {
        // Non-zero content error counter must not affect transient error decision
        RetryDecision decision = evaluateTransientError(new FailureCounters(1, 0), 2);
        assertInstanceOf(RetryDecision.TransientErrorRetryable.class, decision);
    }

    @Test
    void evaluate_transientError_legacyDataWithHigherCounts_finalizesCorrectly() {
        // Existing M4-M6 data may have counter values beyond normal expectations;
        // the evaluator must still apply the threshold check consistently.
        // counter before=5, after=6 >= 3 → final
        RetryDecision decision = evaluateTransientError(new FailureCounters(3, 5), 3);
        assertInstanceOf(RetryDecision.TransientErrorFinal.class, decision);
    }

    // -------------------------------------------------------------------------
    // Target copy technical error rule
    // -------------------------------------------------------------------------

    @Test
    void evaluate_targetCopyError_returnsTargetCopyWithImmediateRetry() {
        RetryDecision decision = evaluator.evaluate(
                DocumentErrorClassification.TARGET_COPY_TECHNICAL_ERROR,
                new FailureCounters(0, 0), 1, FAILURE_CLASS, FAILURE_MESSAGE);
        assertInstanceOf(RetryDecision.TargetCopyWithImmediateRetry.class, decision);
        RetryDecision.TargetCopyWithImmediateRetry immediate =
                (RetryDecision.TargetCopyWithImmediateRetry) decision;
        assertEquals(FAILURE_MESSAGE, immediate.failureMessage());
        // The failure class is normalised to the classification's enum name
        assertEquals(DocumentErrorClassification.TARGET_COPY_TECHNICAL_ERROR.name(),
                immediate.failureClass());
    }

    @Test
    void evaluate_targetCopyError_countersAndMaxRetriesAreIgnored() {
        // Target copy decision is independent of counters and maxRetriesTransient
        RetryDecision decision = evaluator.evaluate(
                DocumentErrorClassification.TARGET_COPY_TECHNICAL_ERROR,
                new FailureCounters(2, 3), 5, FAILURE_CLASS, FAILURE_MESSAGE);
        assertInstanceOf(RetryDecision.TargetCopyWithImmediateRetry.class, decision);
    }

    // -------------------------------------------------------------------------
    // Immediate within-run retry decision
    // -------------------------------------------------------------------------

    @Test
    void evaluateImmediateRetry_firstAttempt_returnsAllowed() {
        assertEquals(ImmediateRetryDecision.ALLOWED, evaluator.evaluateImmediateRetry(true));
    }

    @Test
    void evaluateImmediateRetry_secondAttempt_returnsDenied() {
        assertEquals(ImmediateRetryDecision.DENIED, evaluator.evaluateImmediateRetry(false));
    }

    // -------------------------------------------------------------------------
    // Guard conditions (explicit calls on purpose: the invalid argument itself
    // is the subject under test)
    // -------------------------------------------------------------------------

    @Test
    void evaluate_throwsWhenMaxRetriesTransientIsZero() {
        assertThrows(IllegalArgumentException.class, () ->
                evaluateTransientError(FailureCounters.zero(), 0));
    }

    @Test
    void evaluate_throwsWhenMaxRetriesTransientIsNegative() {
        assertThrows(IllegalArgumentException.class, () ->
                evaluateTransientError(FailureCounters.zero(), -1));
    }

    @Test
    void evaluate_throwsWhenErrorClassIsNull() {
        assertThrows(NullPointerException.class, () ->
                evaluator.evaluate(null, FailureCounters.zero(), 1,
                        FAILURE_CLASS, FAILURE_MESSAGE));
    }

    @Test
    void evaluate_throwsWhenCountersAreNull() {
        assertThrows(NullPointerException.class, () ->
                evaluator.evaluate(
                        DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                        null, 1, FAILURE_CLASS, FAILURE_MESSAGE));
    }

    @Test
    void evaluate_throwsWhenFailureClassIsNull() {
        assertThrows(NullPointerException.class, () ->
                evaluator.evaluate(
                        DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                        FailureCounters.zero(), 1, null, FAILURE_MESSAGE));
    }

    @Test
    void evaluate_throwsWhenFailureClassIsBlank() {
        assertThrows(IllegalArgumentException.class, () ->
                evaluator.evaluate(
                        DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                        FailureCounters.zero(), 1, " ", FAILURE_MESSAGE));
    }

    @Test
    void evaluate_throwsWhenFailureMessageIsNull() {
        assertThrows(NullPointerException.class, () ->
                evaluator.evaluate(
                        DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                        FailureCounters.zero(), 1, FAILURE_CLASS, null));
    }

    @Test
    void evaluate_throwsWhenFailureMessageIsBlank() {
        assertThrows(IllegalArgumentException.class, () ->
                evaluator.evaluate(
                        DocumentErrorClassification.DETERMINISTIC_CONTENT_ERROR,
                        FailureCounters.zero(), 1, FAILURE_CLASS, " "));
    }
}

View File

@@ -70,6 +70,9 @@ class DocumentProcessingCoordinatorTest {
private static final String FINGERPRINT_HEX =
"a".repeat(64); // 64 lowercase hex chars
/** Default transient retry limit used in the shared {@link #processor} instance. */
private static final int DEFAULT_MAX_RETRIES_TRANSIENT = 3;
private CapturingDocumentRecordRepository recordRepo;
private CapturingProcessingAttemptRepository attemptRepo;
private CapturingUnitOfWorkPort unitOfWorkPort;
@@ -86,7 +89,8 @@ class DocumentProcessingCoordinatorTest {
attemptRepo = new CapturingProcessingAttemptRepository();
unitOfWorkPort = new CapturingUnitOfWorkPort(recordRepo, attemptRepo);
processor = new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger());
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
candidate = new SourceDocumentCandidate(
"test.pdf", 1024L, new SourceDocumentLocator("/tmp/test.pdf"));
@@ -198,9 +202,11 @@ class DocumentProcessingCoordinatorTest {
@Test
void process_knownDocument_technicalError_incrementsTransientCounter_remainsRetryable() {
// Starting with 1 transient error; with DEFAULT_MAX_RETRIES_TRANSIENT=3, counter
// becomes 2 after this run which is still below the limit → FAILED_RETRYABLE
DocumentRecord existingRecord = buildRecord(
ProcessingStatus.FAILED_RETRYABLE,
new FailureCounters(0, 2));
new FailureCounters(0, 1));
recordRepo.setLookupResult(new DocumentKnownProcessable(existingRecord));
DocumentProcessingOutcome outcome = new TechnicalDocumentError(
@@ -212,10 +218,54 @@ class DocumentProcessingCoordinatorTest {
DocumentRecord record = recordRepo.updatedRecords.get(0);
assertEquals(ProcessingStatus.FAILED_RETRYABLE, record.overallStatus());
assertEquals(0, record.failureCounters().contentErrorCount());
assertEquals(3, record.failureCounters().transientErrorCount());
assertEquals(2, record.failureCounters().transientErrorCount());
assertTrue(attemptRepo.savedAttempts.get(0).retryable());
}
@Test
void process_knownDocument_technicalError_atTransientLimit_persistsFailedFinal() {
    // One transient failure short of the limit: this run's error must finalise the document
    DocumentRecord recordAtLimit = buildRecord(
            ProcessingStatus.FAILED_RETRYABLE,
            new FailureCounters(0, DEFAULT_MAX_RETRIES_TRANSIENT - 1));
    recordRepo.setLookupResult(new DocumentKnownProcessable(recordAtLimit));
    DocumentProcessingOutcome transientError = new TechnicalDocumentError(
            candidate, "Timeout at limit", null);

    processor.process(candidate, fingerprint, transientError, context, attemptStart);

    assertEquals(1, recordRepo.updatedRecords.size());
    DocumentRecord persisted = recordRepo.updatedRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_FINAL, persisted.overallStatus(),
            "Document must be finalised when transient limit is reached");
    assertEquals(DEFAULT_MAX_RETRIES_TRANSIENT, persisted.failureCounters().transientErrorCount(),
            "Transient counter must be incremented to the limit value");
    assertFalse(attemptRepo.savedAttempts.get(0).retryable(),
            "Attempt must not be retryable when transient limit is reached");
}
@Test
void process_newDocument_technicalError_maxRetriesTransient1_immediatelyFinalises() {
    // A limit of one means the very first transient error already exhausts the retry budget
    DocumentProcessingCoordinator singleRetryCoordinator = new DocumentProcessingCoordinator(
            recordRepo, attemptRepo, unitOfWorkPort,
            new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 1);
    recordRepo.setLookupResult(new DocumentUnknown());
    DocumentProcessingOutcome transientError = new TechnicalDocumentError(
            candidate, "I/O error", null);

    singleRetryCoordinator.process(candidate, fingerprint, transientError, context, attemptStart);

    assertEquals(1, recordRepo.createdRecords.size());
    DocumentRecord created = recordRepo.createdRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_FINAL, created.overallStatus(),
            "With maxRetriesTransient=1, the first transient error must immediately finalise");
    assertEquals(1, created.failureCounters().transientErrorCount());
    assertFalse(attemptRepo.savedAttempts.get(0).retryable());
}
@Test
void process_knownDocument_namingProposalReady_persistsProposalReadyStatus() {
DocumentRecord existingRecord = buildRecord(
@@ -617,7 +667,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
recordRepo.setLookupResult(new PersistenceLookupTechnicalFailure("Datenbank nicht erreichbar", null));
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
@@ -634,7 +685,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -652,7 +704,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DocumentRecord existingRecord = buildRecord(ProcessingStatus.FAILED_FINAL, new FailureCounters(2, 0));
recordRepo.setLookupResult(new DocumentTerminalFinalFailure(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckFailed(
@@ -670,7 +723,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
@@ -687,7 +741,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
recordRepo.setLookupResult(new DocumentUnknown());
unitOfWorkPort.failOnExecute = true;
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -705,7 +760,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
DocumentProcessingOutcome outcome = new PreCheckPassed(
@@ -723,7 +779,8 @@ class DocumentProcessingCoordinatorTest {
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
DocumentProcessingCoordinator coordinatorWithCapturingLogger =
new DocumentProcessingCoordinator(recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger);
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), capturingLogger,
DEFAULT_MAX_RETRIES_TRANSIENT);
DocumentRecord existingRecord = buildRecord(ProcessingStatus.SUCCESS, FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
unitOfWorkPort.failOnExecute = true;
@@ -811,7 +868,8 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithFailingFolder = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new FailingTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger());
new FailingTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
coordinatorWithFailingFolder.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -830,7 +888,8 @@ class DocumentProcessingCoordinatorTest {
DocumentProcessingCoordinator coordinatorWithFailingCopy = new DocumentProcessingCoordinator(
recordRepo, attemptRepo, unitOfWorkPort,
new NoOpTargetFolderPort(), new FailingTargetFileCopyPort(), new NoOpProcessingLogger());
new NoOpTargetFolderPort(), new FailingTargetFileCopyPort(), new NoOpProcessingLogger(),
DEFAULT_MAX_RETRIES_TRANSIENT);
coordinatorWithFailingCopy.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);
@@ -902,6 +961,175 @@ class DocumentProcessingCoordinatorTest {
assertFalse(result, "Should return false when persistence fails after successful copy");
}
@Test
void processDeferredOutcome_proposalReady_firstCopyFails_immediateRetrySucceeds_persistsSuccess() {
    // The first copy attempt fails; the immediate within-run retry then succeeds → SUCCESS
    DocumentRecord proposalRecord = buildRecord(ProcessingStatus.PROPOSAL_READY, FailureCounters.zero());
    recordRepo.setLookupResult(new DocumentKnownProcessable(proposalRecord));
    attemptRepo.savedAttempts.add(buildValidProposalAttempt());
    CountingTargetFileCopyPort copyPort = new CountingTargetFileCopyPort(1); // fail first call only
    DocumentProcessingCoordinator coordinator = new DocumentProcessingCoordinator(
            recordRepo, attemptRepo, unitOfWorkPort,
            new NoOpTargetFolderPort(), copyPort, new NoOpProcessingLogger(),
            DEFAULT_MAX_RETRIES_TRANSIENT);

    boolean result = coordinator.processDeferredOutcome(
            candidate, fingerprint, context, attemptStart, c -> {
                throw new AssertionError("Pipeline must not run for PROPOSAL_READY");
            });

    assertTrue(result, "Should succeed when immediate retry of target copy succeeds");
    ProcessingAttempt successAttempt = attemptRepo.savedAttempts.stream()
            .filter(a -> a.status() == ProcessingStatus.SUCCESS)
            .findFirst()
            .orElse(null);
    assertNotNull(successAttempt, "A SUCCESS attempt must be persisted after a successful immediate retry");
    DocumentRecord masterRecord = recordRepo.updatedRecords.get(0);
    assertEquals(ProcessingStatus.SUCCESS, masterRecord.overallStatus(),
            "Master record must show SUCCESS after successful immediate retry");
    assertEquals(2, copyPort.callCount,
            "copyToTarget must have been called exactly twice: first attempt + one retry");
}
@Test
void processDeferredOutcome_proposalReady_bothCopyAttemptsFail_persistsTransientError() {
    // Both the first copy attempt AND the immediate retry fail → FAILED_RETRYABLE
    DocumentRecord proposalRecord = buildRecord(ProcessingStatus.PROPOSAL_READY, FailureCounters.zero());
    recordRepo.setLookupResult(new DocumentKnownProcessable(proposalRecord));
    attemptRepo.savedAttempts.add(buildValidProposalAttempt());
    CountingTargetFileCopyPort copyPort = new CountingTargetFileCopyPort(2); // fail both calls
    DocumentProcessingCoordinator coordinator = new DocumentProcessingCoordinator(
            recordRepo, attemptRepo, unitOfWorkPort,
            new NoOpTargetFolderPort(), copyPort, new NoOpProcessingLogger(),
            DEFAULT_MAX_RETRIES_TRANSIENT);

    coordinator.processDeferredOutcome(candidate, fingerprint, context, attemptStart, c -> null);

    ProcessingAttempt errorAttempt = attemptRepo.savedAttempts.stream()
            .filter(a -> a.status() == ProcessingStatus.FAILED_RETRYABLE)
            .findFirst()
            .orElse(null);
    assertNotNull(errorAttempt, "A FAILED_RETRYABLE attempt must be persisted when both copy attempts fail");
    assertTrue(errorAttempt.retryable(), "Error must be retryable after exhausting immediate retry");
    assertEquals(2, copyPort.callCount,
            "copyToTarget must have been called exactly twice: first attempt + one immediate retry");
}
@Test
void processDeferredOutcome_proposalReady_immediateRetryDoesNotTriggerAiOrNewProposal() {
    // The immediate retry path must never re-enter the (AI) processing pipeline
    DocumentRecord proposalRecord = buildRecord(ProcessingStatus.PROPOSAL_READY, FailureCounters.zero());
    recordRepo.setLookupResult(new DocumentKnownProcessable(proposalRecord));
    attemptRepo.savedAttempts.add(buildValidProposalAttempt());
    CountingTargetFileCopyPort copyPort = new CountingTargetFileCopyPort(1); // fail first, succeed second
    DocumentProcessingCoordinator coordinator = new DocumentProcessingCoordinator(
            recordRepo, attemptRepo, unitOfWorkPort,
            new NoOpTargetFolderPort(), copyPort, new NoOpProcessingLogger(),
            DEFAULT_MAX_RETRIES_TRANSIENT);

    coordinator.processDeferredOutcome(
            candidate, fingerprint, context, attemptStart,
            c -> { throw new AssertionError("AI pipeline must NOT run during immediate retry"); });

    // The retry succeeded, so no FAILED_RETRYABLE attempt may have been written
    long failedRetryableCount = attemptRepo.savedAttempts.stream()
            .filter(a -> a.status() == ProcessingStatus.FAILED_RETRYABLE)
            .count();
    assertEquals(0, failedRetryableCount,
            "No FAILED_RETRYABLE must be persisted when immediate retry succeeds");
}
// -------------------------------------------------------------------------
// Sequential multi-run lifecycle tests
// -------------------------------------------------------------------------
@Test
void process_contentErrorLifecycle_firstRunRetryable_secondRunFinal_thirdRunSkipped() {
    // Walks one document through three consecutive batch runs with the same
    // deterministic content error and verifies the full status lifecycle:
    // FAILED_RETRYABLE -> FAILED_FINAL -> SKIPPED_FINAL_FAILURE (counters frozen).
    DocumentProcessingOutcome contentError = new PreCheckFailed(
            candidate, PreCheckFailureReason.NO_USABLE_TEXT);

    // Run 1: unknown document, first content error -> FAILED_RETRYABLE, counter = 1.
    recordRepo.setLookupResult(new DocumentUnknown());
    processor.process(candidate, fingerprint, contentError, context, attemptStart);
    DocumentRecord afterFirstRun = recordRepo.createdRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_RETRYABLE, afterFirstRun.overallStatus(),
            "First content error must yield FAILED_RETRYABLE");
    assertEquals(1, afterFirstRun.failureCounters().contentErrorCount());
    assertTrue(attemptRepo.savedAttempts.get(0).retryable(),
            "First content error attempt must be retryable");

    // Run 2: known document (contentErrorCount=1), second content error -> FAILED_FINAL.
    recordRepo.setLookupResult(new DocumentKnownProcessable(afterFirstRun));
    processor.process(candidate, fingerprint, contentError, context, attemptStart);
    DocumentRecord afterSecondRun = recordRepo.updatedRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_FINAL, afterSecondRun.overallStatus(),
            "Second content error must yield FAILED_FINAL");
    assertEquals(2, afterSecondRun.failureCounters().contentErrorCount());
    assertFalse(attemptRepo.savedAttempts.get(1).retryable(),
            "Second content error attempt must not be retryable");

    // Run 3: terminal FAILED_FINAL -> SKIPPED_FINAL_FAILURE; counters must stay frozen.
    recordRepo.setLookupResult(new DocumentTerminalFinalFailure(afterSecondRun));
    processor.process(candidate, fingerprint, contentError, context, attemptStart);
    assertEquals(3, attemptRepo.savedAttempts.size(),
            "Three attempts must be recorded across the three runs");
    ProcessingAttempt skippedAttempt = attemptRepo.savedAttempts.get(2);
    assertEquals(ProcessingStatus.SKIPPED_FINAL_FAILURE, skippedAttempt.status());
    assertFalse(skippedAttempt.retryable());
    DocumentRecord afterThirdRun = recordRepo.updatedRecords.get(1);
    assertEquals(2, afterThirdRun.failureCounters().contentErrorCount(),
            "Content error counter must remain 2 after a SKIPPED_FINAL_FAILURE event");
    assertEquals(0, afterThirdRun.failureCounters().transientErrorCount(),
            "Transient error counter must remain 0 after a SKIPPED_FINAL_FAILURE event");
}
@Test
void process_transientErrorLifecycle_maxRetriesTransient2_firstRetryable_secondFinal() {
    // With maxRetriesTransient=2 the first transient error stays retryable and the
    // second one reaches the limit and finalises the document.
    DocumentProcessingCoordinator limitedCoordinator = new DocumentProcessingCoordinator(
            recordRepo, attemptRepo, unitOfWorkPort,
            new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 2);
    DocumentProcessingOutcome transientError = new TechnicalDocumentError(candidate, "Timeout", null);

    // Run 1: unknown document, first transient error -> FAILED_RETRYABLE, counter = 1.
    recordRepo.setLookupResult(new DocumentUnknown());
    limitedCoordinator.process(candidate, fingerprint, transientError, context, attemptStart);
    DocumentRecord afterFirstRun = recordRepo.createdRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_RETRYABLE, afterFirstRun.overallStatus(),
            "First transient error must yield FAILED_RETRYABLE when limit not yet reached");
    assertEquals(1, afterFirstRun.failureCounters().transientErrorCount());
    assertTrue(attemptRepo.savedAttempts.get(0).retryable());

    // Run 2: second transient error reaches the configured limit of 2 -> FAILED_FINAL.
    recordRepo.setLookupResult(new DocumentKnownProcessable(afterFirstRun));
    limitedCoordinator.process(candidate, fingerprint, transientError, context, attemptStart);
    DocumentRecord afterSecondRun = recordRepo.updatedRecords.get(0);
    assertEquals(ProcessingStatus.FAILED_FINAL, afterSecondRun.overallStatus(),
            "Second transient error must yield FAILED_FINAL when maxRetriesTransient=2 is reached");
    assertEquals(2, afterSecondRun.failureCounters().transientErrorCount(),
            "Transient error counter must equal maxRetriesTransient after finalisation");
    assertFalse(attemptRepo.savedAttempts.get(1).retryable(),
            "Final transient error attempt must not be retryable");
}
// -------------------------------------------------------------------------
// Helpers
// -------------------------------------------------------------------------
@@ -1089,6 +1317,26 @@ class DocumentProcessingCoordinatorTest {
}
}
/**
 * Test double for {@code TargetFileCopyPort} that fails a configurable number of
 * leading calls and succeeds on every call thereafter. Records the total number
 * of invocations so tests can assert how often {@code copyToTarget} was called.
 */
private static class CountingTargetFileCopyPort implements TargetFileCopyPort {

    // Total number of copyToTarget invocations observed so far.
    private int callCount = 0;
    // How many leading calls should report a technical failure.
    private final int failFirstNCalls;

    CountingTargetFileCopyPort(int failFirstNCalls) {
        this.failFirstNCalls = failFirstNCalls;
    }

    @Override
    public TargetFileCopyResult copyToTarget(
            de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentLocator sourceLocator,
            String resolvedFilename) {
        callCount++;
        if (callCount > failFirstNCalls) {
            return new TargetFileCopySuccess();
        }
        return new TargetFileCopyTechnicalFailure("Simulated copy failure on call " + callCount, false);
    }
}
private static class NoOpTargetFolderPort implements TargetFolderPort {
@Override
public String getTargetFolderLocator() {

View File

@@ -1,6 +1,7 @@
package de.gecheckt.pdf.umbenenner.application.service;
import de.gecheckt.pdf.umbenenner.application.config.RuntimeConfiguration;
import de.gecheckt.pdf.umbenenner.application.port.out.AiContentSensitivity;
import de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome;
import de.gecheckt.pdf.umbenenner.domain.model.PreCheckFailed;
import de.gecheckt.pdf.umbenenner.domain.model.PreCheckFailureReason;
@@ -44,8 +45,8 @@ class DocumentProcessingServiceTest {
SourceDocumentLocator locator = new SourceDocumentLocator(pdfFile.toString());
candidate = new SourceDocumentCandidate("document.pdf", 2048L, locator);
// Create runtime configuration with maxPages limit
runtimeConfig = new RuntimeConfiguration(10);
// Create runtime configuration with maxPages limit and default transient retry limit
runtimeConfig = new RuntimeConfiguration(10, 3, AiContentSensitivity.PROTECT_SENSITIVE_CONTENT);
}
@Test

View File

@@ -1,6 +1,7 @@
package de.gecheckt.pdf.umbenenner.application.service;
import de.gecheckt.pdf.umbenenner.application.config.RuntimeConfiguration;
import de.gecheckt.pdf.umbenenner.application.port.out.AiContentSensitivity;
import de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome;
import de.gecheckt.pdf.umbenenner.domain.model.PreCheckFailed;
import de.gecheckt.pdf.umbenenner.domain.model.PreCheckFailureReason;
@@ -236,7 +237,7 @@ class PreCheckEvaluatorTest {
// =========================================================================
private RuntimeConfiguration buildConfig(int maxPages) throws Exception {
return new RuntimeConfiguration(maxPages);
return new RuntimeConfiguration(maxPages, 3, AiContentSensitivity.PROTECT_SENSITIVE_CONTENT);
}
private int maxPages(int limit) {

View File

@@ -2,6 +2,7 @@ package de.gecheckt.pdf.umbenenner.application.usecase;
import de.gecheckt.pdf.umbenenner.application.config.RuntimeConfiguration;
import de.gecheckt.pdf.umbenenner.application.port.in.BatchRunOutcome;
import de.gecheckt.pdf.umbenenner.application.port.out.AiContentSensitivity;
import de.gecheckt.pdf.umbenenner.application.port.out.AiInvocationPort;
import de.gecheckt.pdf.umbenenner.application.port.out.AiInvocationTechnicalFailure;
import de.gecheckt.pdf.umbenenner.application.port.out.ClockPort;
@@ -52,6 +53,7 @@ import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -460,7 +462,7 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator failingProcessor = new DocumentProcessingCoordinator(
new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(),
new NoOpUnitOfWorkPort(), new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(),
new NoOpProcessingLogger()) {
new NoOpProcessingLogger(), 3) {
@Override
public boolean processDeferredOutcome(
de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate candidate,
@@ -504,7 +506,7 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator selectiveFailingProcessor = new DocumentProcessingCoordinator(
new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(),
new NoOpUnitOfWorkPort(), new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(),
new NoOpProcessingLogger()) {
new NoOpProcessingLogger(), 3) {
private int callCount = 0;
@Override
@@ -595,7 +597,7 @@ class BatchRunProcessingUseCaseTest {
DocumentProcessingCoordinator failingCoordinator = new DocumentProcessingCoordinator(
new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(),
new NoOpUnitOfWorkPort(), new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(),
new NoOpProcessingLogger()) {
new NoOpProcessingLogger(), 3) {
@Override
public boolean processDeferredOutcome(
de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate c,
@@ -660,11 +662,12 @@ class BatchRunProcessingUseCaseTest {
// Prüft, dass bei erfolgreich verarbeiteter Datei debug() durch logExtractionResult
// und info() durch logProcessingOutcome aufgerufen wird.
// Erwartete debug()-Aufrufe für einen Kandidaten (success-Pfad):
// L138 (lock acquired) + L249 (processCandidate) + L293 (fingerprint) + L337 (logExtractionResult) + L213 (lock released) = 5
// Ohne logExtractionResult-Aufruf: 4
// lock acquired + fingerprint computed + logExtractionResult + lock released = 4
// Ohne logExtractionResult-Aufruf wären es nur 3 debug()-Aufrufe.
// Erwartete info()-Aufrufe für einen Kandidaten (success-Pfad):
// L130 (initiiert) + L145 (gestartet) + L178 (Kandidaten gefunden) + L365 (PreCheckPassed) + L190 (abgeschlossen) = 5
// Ohne logProcessingOutcome-Aufruf: 4
// Batch initiiert + Batch gestartet + Kandidaten gefunden + erkannte Quelldatei
// + logProcessingOutcome (PreCheckPassed) + Batch abgeschlossen = 6
// Ohne logProcessingOutcome-Aufruf wären es 5 info()-Aufrufe.
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
RuntimeConfiguration config = buildConfig(tempDir);
@@ -680,21 +683,21 @@ class BatchRunProcessingUseCaseTest {
useCase.execute(new BatchRunContext(new RunId("log-precheck"), Instant.now()));
// Ohne logExtractionResult wären es mindestens 4 debug()-Aufrufe; mit logExtractionResult 5
assertTrue(capturingLogger.debugCallCount >= 5,
"logExtractionResult muss bei PdfExtractionSuccess debug() aufrufen (erwartet >= 5, war: "
// Ohne logExtractionResult wären es nur 3 debug()-Aufrufe; mit logExtractionResult >= 4
assertTrue(capturingLogger.debugCallCount >= 4,
"logExtractionResult muss bei PdfExtractionSuccess debug() aufrufen (erwartet >= 4, war: "
+ capturingLogger.debugCallCount + ")");
// Ohne logProcessingOutcome wären es 4 info()-Aufrufe; mit logProcessingOutcome 5
assertTrue(capturingLogger.infoCallCount >= 5,
"logProcessingOutcome muss bei PreCheckPassed info() aufrufen (erwartet >= 5, war: "
// Ohne logProcessingOutcome wären es 5 info()-Aufrufe; mit logProcessingOutcome >= 6
assertTrue(capturingLogger.infoCallCount >= 6,
"logProcessingOutcome muss bei PreCheckPassed info() aufrufen (erwartet >= 6, war: "
+ capturingLogger.infoCallCount + ")");
}
@Test
void execute_extractionContentError_logsDebugAndPreCheckFailedInfo() throws Exception {
// Prüft, dass bei PdfExtractionContentError debug (logExtractionResult) und info (logProcessingOutcome) geloggt wird.
// Erwartete debug()-Aufrufe: 5 (lock + processCandidate + fingerprint + logExtractionResult (content) + lock released)
// Erwartete info()-Aufrufe: 5 (L130 + L145 + L178 + L369 PreCheckFailed + L190)
// Erwartete debug()-Aufrufe: 4 (lock acquired + fingerprint + logExtractionResult (content) + lock released)
// Erwartete info()-Aufrufe: 6 (Batch initiiert + gestartet + Kandidaten gefunden + erkannte Quelldatei + PreCheckFailed + abgeschlossen)
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
RuntimeConfiguration config = buildConfig(tempDir);
@@ -710,20 +713,20 @@ class BatchRunProcessingUseCaseTest {
useCase.execute(new BatchRunContext(new RunId("log-content-error"), Instant.now()));
// Ohne logExtractionResult wären es 4 debug()-Aufrufe; mit logExtractionResult 5
assertTrue(capturingLogger.debugCallCount >= 5,
"logExtractionResult muss bei PdfExtractionContentError debug() aufrufen (erwartet >= 5, war: "
// Ohne logExtractionResult wären es nur 3 debug()-Aufrufe; mit logExtractionResult >= 4
assertTrue(capturingLogger.debugCallCount >= 4,
"logExtractionResult muss bei PdfExtractionContentError debug() aufrufen (erwartet >= 4, war: "
+ capturingLogger.debugCallCount + ")");
// Ohne logProcessingOutcome (PreCheckFailed) wären es 4 info()-Aufrufe; mit 5
assertTrue(capturingLogger.infoCallCount >= 5,
"logProcessingOutcome muss bei PreCheckFailed info() aufrufen (erwartet >= 5, war: "
// Ohne logProcessingOutcome (PreCheckFailed) wären es 5 info()-Aufrufe; mit >= 6
assertTrue(capturingLogger.infoCallCount >= 6,
"logProcessingOutcome muss bei PreCheckFailed info() aufrufen (erwartet >= 6, war: "
+ capturingLogger.infoCallCount + ")");
}
@Test
void execute_extractionTechnicalError_logsDebugAndWarn() throws Exception {
// Prüft, dass bei PdfExtractionTechnicalError debug (logExtractionResult) und warn (logProcessingOutcome) geloggt wird.
// Erwartete debug()-Aufrufe: 5 (lock + processCandidate + fingerprint + logExtractionResult + lock released)
// Erwartete debug()-Aufrufe: 4 (lock acquired + fingerprint + logExtractionResult + lock released)
CapturingProcessingLogger capturingLogger = new CapturingProcessingLogger();
RuntimeConfiguration config = buildConfig(tempDir);
@@ -739,15 +742,86 @@ class BatchRunProcessingUseCaseTest {
useCase.execute(new BatchRunContext(new RunId("log-tech-error"), Instant.now()));
// Ohne logExtractionResult wären es 4 debug()-Aufrufe; mit logExtractionResult 5
assertTrue(capturingLogger.debugCallCount >= 5,
"logExtractionResult muss bei PdfExtractionTechnicalError debug() aufrufen (erwartet >= 5, war: "
// Ohne logExtractionResult wären es nur 3 debug()-Aufrufe; mit logExtractionResult >= 4
assertTrue(capturingLogger.debugCallCount >= 4,
"logExtractionResult muss bei PdfExtractionTechnicalError debug() aufrufen (erwartet >= 4, war: "
+ capturingLogger.debugCallCount + ")");
// logProcessingOutcome ruft warn() auf für TechnicalDocumentError
assertTrue(capturingLogger.warnCallCount > 0,
"logProcessingOutcome muss bei TechnicalDocumentError warn() aufrufen");
}
// -------------------------------------------------------------------------
// Log correlation tests
// -------------------------------------------------------------------------
@Test
void execute_preFingerprintError_logContainsRunIdAndCandidateDescription() throws Exception {
    // Pre-fingerprint correlation rule: when fingerprint computation fails, the
    // warning must carry both the run-ID and the candidate's unique identifier so
    // the log entry can still be traced back without a fingerprint.
    String runIdValue = "run-correlation-pre-fp";
    String candidateFilename = "unreadable-candidate.pdf";
    MessageCapturingProcessingLogger logger = new MessageCapturingProcessingLogger();
    RuntimeConfiguration config = buildConfig(tempDir);
    FixedCandidatesPort candidatesPort = new FixedCandidatesPort(
            List.of(makeCandidate(candidateFilename)));

    // Fingerprint computation always ends in a technical error.
    FingerprintPort brokenFingerprintPort = c ->
            new FingerprintTechnicalError("File not readable", null);

    DefaultBatchRunProcessingUseCase useCase = new DefaultBatchRunProcessingUseCase(
            config, new MockRunLockPort(), candidatesPort, new NoOpExtractionPort(),
            brokenFingerprintPort, new NoOpDocumentProcessingCoordinator(),
            buildStubAiNamingService(), logger);

    useCase.execute(new BatchRunContext(new RunId(runIdValue), Instant.now()));

    // At least one warning must mention both correlation identifiers.
    boolean bothReferencesPresent = logger.warnMessages.stream()
            .anyMatch(m -> m.contains(runIdValue) && m.contains(candidateFilename));
    assertTrue(bothReferencesPresent,
            "Pre-fingerprint warning must reference both run-ID '" + runIdValue
                    + "' and candidate '" + candidateFilename + "'. "
                    + "Captured warn messages: " + logger.warnMessages);
}
@Test
void execute_postFingerprintProcessing_logContainsFingerprintHex() throws Exception {
    // Post-fingerprint correlation rule: once the SHA-256 fingerprint has been
    // computed, at least one log message must contain its hex value.
    String candidateFilename = "identifiable.pdf";
    MessageCapturingProcessingLogger logger = new MessageCapturingProcessingLogger();
    RuntimeConfiguration config = buildConfig(tempDir);
    SourceDocumentCandidate localCandidate = makeCandidate(candidateFilename);
    FixedCandidatesPort candidatesPort = new FixedCandidatesPort(List.of(localCandidate));
    FixedExtractionPort extractionPort = new FixedExtractionPort(
            new PdfExtractionSuccess("Some invoice text", new PdfPageCount(1)));

    // Deterministic fingerprints let the test predict the exact hex value in the log.
    AlwaysSuccessFingerprintPort fingerprintPort = new AlwaysSuccessFingerprintPort();
    DocumentFingerprint expectedFingerprint =
            ((FingerprintSuccess) fingerprintPort.computeFingerprint(localCandidate)).fingerprint();

    DefaultBatchRunProcessingUseCase useCase = new DefaultBatchRunProcessingUseCase(
            config, new MockRunLockPort(), candidatesPort, extractionPort,
            fingerprintPort, new TrackingDocumentProcessingCoordinator(),
            buildStubAiNamingService(), logger);

    useCase.execute(new BatchRunContext(new RunId("run-correlation-post-fp"), Instant.now()));

    String fingerprintHex = expectedFingerprint.sha256Hex();
    boolean hexLogged = logger.allMessages().stream()
            .anyMatch(m -> m.contains(fingerprintHex));
    assertTrue(hexLogged,
            "At least one log message must contain the fingerprint hex '" + fingerprintHex
                    + "' after successful fingerprint computation. "
                    + "Captured messages: " + logger.allMessages());
}
// -------------------------------------------------------------------------
// Helpers
// -------------------------------------------------------------------------
@@ -779,8 +853,8 @@ class BatchRunProcessingUseCaseTest {
}
private static RuntimeConfiguration buildConfig(Path tempDir) throws Exception {
// maxPages set to 3 useful for page-limit tests
return new RuntimeConfiguration(3);
// maxPages set to 3 useful for page-limit tests; maxRetriesTransient set to 3
return new RuntimeConfiguration(3, 3, AiContentSensitivity.PROTECT_SENSITIVE_CONTENT);
}
private static SourceDocumentCandidate makeCandidate(String filename) {
@@ -937,7 +1011,7 @@ class BatchRunProcessingUseCaseTest {
private static class NoOpDocumentProcessingCoordinator extends DocumentProcessingCoordinator {
NoOpDocumentProcessingCoordinator() {
super(new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(), new NoOpUnitOfWorkPort(),
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger());
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
}
}
@@ -949,7 +1023,7 @@ class BatchRunProcessingUseCaseTest {
TrackingDocumentProcessingCoordinator() {
super(new NoOpDocumentRecordRepository(), new NoOpProcessingAttemptRepository(), new NoOpUnitOfWorkPort(),
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger());
new NoOpTargetFolderPort(), new NoOpTargetFileCopyPort(), new NoOpProcessingLogger(), 3);
}
@Override
@@ -1094,6 +1168,62 @@ class BatchRunProcessingUseCaseTest {
}
}
/**
 * Captures the fully formatted log message of every call, grouped by log level.
 * Used by log-correlation tests that must inspect message content rather than
 * merely count invocations.
 */
private static class MessageCapturingProcessingLogger implements ProcessingLogger {

    final List<String> infoMessages = new ArrayList<>();
    final List<String> debugMessages = new ArrayList<>();
    final List<String> warnMessages = new ArrayList<>();
    final List<String> errorMessages = new ArrayList<>();

    /**
     * Substitutes each {@code {}} placeholder with the next argument, mirroring
     * the SLF4J/Log4j2 parameterized-message convention. Surplus placeholders
     * (or surplus arguments) are left untouched.
     */
    private static String format(String message, Object... args) {
        if (args == null || args.length == 0) return message;
        StringBuilder out = new StringBuilder();
        int nextArg = 0;
        int cursor = 0;
        for (int brace = message.indexOf("{}"); brace != -1 && nextArg < args.length;
                brace = message.indexOf("{}", cursor)) {
            out.append(message, cursor, brace).append(args[nextArg++]);
            cursor = brace + 2;
        }
        out.append(message, cursor, message.length());
        return out.toString();
    }

    @Override
    public void info(String message, Object... args) {
        infoMessages.add(format(message, args));
    }

    @Override
    public void debug(String message, Object... args) {
        debugMessages.add(format(message, args));
    }

    @Override
    public void warn(String message, Object... args) {
        warnMessages.add(format(message, args));
    }

    @Override
    public void error(String message, Object... args) {
        errorMessages.add(format(message, args));
    }

    /** All captured messages across every level, in level order (info, debug, warn, error). */
    List<String> allMessages() {
        List<String> combined = new ArrayList<>(infoMessages);
        combined.addAll(debugMessages);
        combined.addAll(warnMessages);
        combined.addAll(errorMessages);
        return combined;
    }
}
/** Zählt Logger-Aufrufe je Level, um VoidMethodCallMutator-Mutationen zu erkennen. */
private static class CapturingProcessingLogger implements ProcessingLogger {
int infoCallCount = 0;