1
0

Nachbearbeitung: Meilensteinbezeichner aus DocumentProcessingCoordinator

entfernt
This commit is contained in:
2026-04-04 12:20:27 +02:00
parent 3e65eff6e6
commit cb7ed57721
4 changed files with 66 additions and 66 deletions

View File

@@ -113,7 +113,7 @@ public class DocumentProcessingCoordinator {
* <p>
* The caller must have already computed a valid {@link DocumentFingerprint} for the
* candidate. The outcome (from the PDF extraction and pre-check pipeline) is
* provided as {@code m3Outcome} and is used only when the document is not in a
* provided as {@code outcome} and is used only when the document is not in a
* terminal state.
* <p>
* This method never throws. All persistence failures are caught, logged, and
@@ -122,7 +122,7 @@ public class DocumentProcessingCoordinator {
* @param candidate the source document candidate being processed; must not be null
* @param fingerprint the successfully computed fingerprint for this candidate;
* must not be null
* @param m3Outcome the result of the pipeline (PDF extraction + pre-checks);
* @param outcome the result of the extraction and pre-check pipeline;
* must not be null
* @param context the current batch run context (for run ID and timing);
* must not be null
@@ -132,17 +132,17 @@ public class DocumentProcessingCoordinator {
public void process(
SourceDocumentCandidate candidate,
DocumentFingerprint fingerprint,
DocumentProcessingOutcome m3Outcome,
DocumentProcessingOutcome outcome,
BatchRunContext context,
Instant attemptStart) {
Objects.requireNonNull(candidate, "candidate must not be null");
Objects.requireNonNull(fingerprint, "fingerprint must not be null");
Objects.requireNonNull(m3Outcome, "m3Outcome must not be null");
Objects.requireNonNull(outcome, "outcome must not be null");
Objects.requireNonNull(context, "context must not be null");
Objects.requireNonNull(attemptStart, "attemptStart must not be null");
processWithM3Execution(candidate, fingerprint, context, attemptStart, ignored -> m3Outcome);
processDeferredOutcome(candidate, fingerprint, context, attemptStart, ignored -> outcome);
}
/**
@@ -169,21 +169,21 @@ public class DocumentProcessingCoordinator {
* must not be null
* @param attemptStart the instant at which processing of this candidate began;
* must not be null
* @param m3Executor functional interface to execute the pipeline when needed;
* must not be null
* @param pipelineExecutor functional interface that executes the extraction and pre-check
* pipeline when needed; must not be null
*/
public void processWithM3Execution(
public void processDeferredOutcome(
SourceDocumentCandidate candidate,
DocumentFingerprint fingerprint,
BatchRunContext context,
Instant attemptStart,
Function<SourceDocumentCandidate, DocumentProcessingOutcome> m3Executor) {
Function<SourceDocumentCandidate, DocumentProcessingOutcome> pipelineExecutor) {
Objects.requireNonNull(candidate, "candidate must not be null");
Objects.requireNonNull(fingerprint, "fingerprint must not be null");
Objects.requireNonNull(context, "context must not be null");
Objects.requireNonNull(attemptStart, "attemptStart must not be null");
Objects.requireNonNull(m3Executor, "m3Executor must not be null");
Objects.requireNonNull(pipelineExecutor, "pipelineExecutor must not be null");
// Step 1: Load the document master record
DocumentRecordLookupResult lookupResult =
@@ -220,15 +220,15 @@ public class DocumentProcessingCoordinator {
case DocumentUnknown ignored -> {
// New document – execute pipeline and process
DocumentProcessingOutcome m3Outcome = m3Executor.apply(candidate);
processAndPersistNewDocument(candidate, fingerprint, m3Outcome, context, attemptStart);
DocumentProcessingOutcome outcome = pipelineExecutor.apply(candidate);
processAndPersistNewDocument(candidate, fingerprint, outcome, context, attemptStart);
}
case DocumentKnownProcessable knownProcessable -> {
// Known but not terminal – execute pipeline and process
DocumentProcessingOutcome m3Outcome = m3Executor.apply(candidate);
DocumentProcessingOutcome outcome = pipelineExecutor.apply(candidate);
processAndPersistKnownDocument(
candidate, fingerprint, m3Outcome, knownProcessable.record(),
candidate, fingerprint, outcome, knownProcessable.record(),
context, attemptStart);
}
@@ -318,23 +318,23 @@ public class DocumentProcessingCoordinator {
* Processes a newly discovered document (no existing master record) and persists
* both the attempt and the new master record.
*
* @param candidate the candidate being processed
* @param fingerprint the document fingerprint
* @param m3Outcome the pipeline result
* @param context the current batch run context
* @param attemptStart the start instant of this processing attempt
* @param candidate the candidate being processed
* @param fingerprint the document fingerprint
* @param pipelineOutcome the pipeline result
* @param context the current batch run context
* @param attemptStart the start instant of this processing attempt
*/
private void processAndPersistNewDocument(
SourceDocumentCandidate candidate,
DocumentFingerprint fingerprint,
DocumentProcessingOutcome m3Outcome,
DocumentProcessingOutcome pipelineOutcome,
BatchRunContext context,
Instant attemptStart) {
Instant now = Instant.now();
// Map outcome to status/counters for a brand-new document
ProcessingOutcome outcome = mapM3OutcomeForNewDocument(m3Outcome);
ProcessingOutcome outcome = mapOutcomeForNewDocument(pipelineOutcome);
try {
// Attempt number is always 1 for a new document
@@ -382,17 +382,17 @@ public class DocumentProcessingCoordinator {
* Processes a known but non-terminal document and updates both the attempt history
* and the master record.
*
* @param candidate the candidate being processed
* @param fingerprint the document fingerprint
* @param m3Outcome the pipeline result
* @param existingRecord the current master record (not terminal)
* @param context the current batch run context
* @param attemptStart the start instant of this processing attempt
* @param candidate the candidate being processed
* @param fingerprint the document fingerprint
* @param pipelineOutcome the pipeline result
* @param existingRecord the current master record (not terminal)
* @param context the current batch run context
* @param attemptStart the start instant of this processing attempt
*/
private void processAndPersistKnownDocument(
SourceDocumentCandidate candidate,
DocumentFingerprint fingerprint,
DocumentProcessingOutcome m3Outcome,
DocumentProcessingOutcome pipelineOutcome,
DocumentRecord existingRecord,
BatchRunContext context,
Instant attemptStart) {
@@ -400,7 +400,7 @@ public class DocumentProcessingCoordinator {
Instant now = Instant.now();
// Map outcome to status/counters, taking existing counters into account
ProcessingOutcome outcome = mapM3OutcomeForKnownDocument(m3Outcome, existingRecord.failureCounters());
ProcessingOutcome outcome = mapOutcomeForKnownDocument(pipelineOutcome, existingRecord.failureCounters());
try {
int attemptNumber = processingAttemptRepository.loadNextAttemptNumber(fingerprint);
@@ -449,11 +449,11 @@ public class DocumentProcessingCoordinator {
* Maps an outcome to status, counters, and retryable flag for a brand-new
* document (no prior history, counters start at zero).
*
* @param m3Outcome the pipeline result
* @param pipelineOutcome the pipeline result
* @return the outcome with status, counters and retryable flag
*/
private ProcessingOutcome mapM3OutcomeForNewDocument(DocumentProcessingOutcome m3Outcome) {
return mapM3OutcomeForKnownDocument(m3Outcome, FailureCounters.zero());
private ProcessingOutcome mapOutcomeForNewDocument(DocumentProcessingOutcome pipelineOutcome) {
return mapOutcomeForKnownDocument(pipelineOutcome, FailureCounters.zero());
}
/**
@@ -474,15 +474,15 @@ public class DocumentProcessingCoordinator {
* transientErrorCount +1, {@code retryable=true}.</li>
* </ul>
*
* @param m3Outcome the pipeline result
* @param pipelineOutcome the pipeline result
* @param existingCounters the current failure counters from the master record
* @return the outcome with updated status, counters and retryable flag
*/
private ProcessingOutcome mapM3OutcomeForKnownDocument(
DocumentProcessingOutcome m3Outcome,
private ProcessingOutcome mapOutcomeForKnownDocument(
DocumentProcessingOutcome pipelineOutcome,
FailureCounters existingCounters) {
return switch (m3Outcome) {
return switch (pipelineOutcome) {
case de.gecheckt.pdf.umbenenner.domain.model.PreCheckPassed ignored -> {
// success: document passed all pre-checks
// In scope (no KI, no target copy), PreCheckPassed is the terminal success

View File

@@ -235,12 +235,12 @@ public class DefaultBatchRunProcessingUseCase implements BatchRunProcessingUseCa
// Delegate the complete processing logic to the processor
// The processor handles loading document master record, checking terminal status,
// executing pipeline only when needed, and persisting results consistently
documentProcessingCoordinator.processWithM3Execution(
documentProcessingCoordinator.processDeferredOutcome(
candidate,
fingerprint,
context,
attemptStart,
this::runM3Pipeline); // Pass the executor as a function
this::runExtractionPipeline);
}
}
}
@@ -255,7 +255,7 @@ public class DefaultBatchRunProcessingUseCase implements BatchRunProcessingUseCa
* @param candidate the candidate to run through the pipeline
* @return the pipeline outcome (pre-check passed, pre-check failed, or technical error)
*/
private DocumentProcessingOutcome runM3Pipeline(SourceDocumentCandidate candidate) {
private DocumentProcessingOutcome runExtractionPipeline(SourceDocumentCandidate candidate) {
PdfExtractionResult extractionResult =
pdfTextExtractionPort.extractTextAndPageCount(candidate);

View File

@@ -87,10 +87,10 @@ class DocumentProcessingCoordinatorTest {
@Test
void process_newDocument_preCheckPassed_persistsSuccessStatus() {
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
// One attempt written
assertEquals(1, attemptRepo.savedAttempts.size());
@@ -113,10 +113,10 @@ class DocumentProcessingCoordinatorTest {
@Test
void process_newDocument_firstContentError_persistsFailedRetryable_contentCounterOne() {
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome m3Outcome = new PreCheckFailed(
DocumentProcessingOutcome outcome = new PreCheckFailed(
candidate, PreCheckFailureReason.NO_USABLE_TEXT);
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
ProcessingAttempt attempt = attemptRepo.savedAttempts.get(0);
@@ -135,10 +135,10 @@ class DocumentProcessingCoordinatorTest {
@Test
void process_newDocument_technicalError_persistsFailedRetryable_transientCounterOne() {
recordRepo.setLookupResult(new DocumentUnknown());
DocumentProcessingOutcome m3Outcome = new TechnicalDocumentError(
DocumentProcessingOutcome outcome = new TechnicalDocumentError(
candidate, "I/O error", null);
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
ProcessingAttempt attempt = attemptRepo.savedAttempts.get(0);
@@ -164,10 +164,10 @@ class DocumentProcessingCoordinatorTest {
new FailureCounters(1, 0));
recordRepo.setLookupResult(new DocumentKnownProcessable(existingRecord));
DocumentProcessingOutcome m3Outcome = new PreCheckFailed(
DocumentProcessingOutcome outcome = new PreCheckFailed(
candidate, PreCheckFailureReason.PAGE_LIMIT_EXCEEDED);
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
ProcessingAttempt attempt = attemptRepo.savedAttempts.get(0);
@@ -188,10 +188,10 @@ class DocumentProcessingCoordinatorTest {
new FailureCounters(0, 2));
recordRepo.setLookupResult(new DocumentKnownProcessable(existingRecord));
DocumentProcessingOutcome m3Outcome = new TechnicalDocumentError(
DocumentProcessingOutcome outcome = new TechnicalDocumentError(
candidate, "Timeout", null);
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, recordRepo.updatedRecords.size());
DocumentRecord record = recordRepo.updatedRecords.get(0);
@@ -208,10 +208,10 @@ class DocumentProcessingCoordinatorTest {
new FailureCounters(0, 1));
recordRepo.setLookupResult(new DocumentKnownProcessable(existingRecord));
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, recordRepo.updatedRecords.size());
DocumentRecord record = recordRepo.updatedRecords.get(0);
@@ -233,10 +233,10 @@ class DocumentProcessingCoordinatorTest {
FailureCounters.zero());
recordRepo.setLookupResult(new DocumentTerminalSuccess(existingRecord));
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
ProcessingAttempt attempt = attemptRepo.savedAttempts.get(0);
@@ -261,10 +261,10 @@ class DocumentProcessingCoordinatorTest {
new FailureCounters(2, 0));
recordRepo.setLookupResult(new DocumentTerminalFinalFailure(existingRecord));
DocumentProcessingOutcome m3Outcome = new PreCheckFailed(
DocumentProcessingOutcome outcome = new PreCheckFailed(
candidate, PreCheckFailureReason.NO_USABLE_TEXT);
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
ProcessingAttempt attempt = attemptRepo.savedAttempts.get(0);
@@ -304,12 +304,12 @@ class DocumentProcessingCoordinatorTest {
void process_persistenceLookupFailure_noAttemptWritten_noException() {
recordRepo.setLookupResult(new PersistenceLookupTechnicalFailure("DB unavailable", null));
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
// Must not throw
assertDoesNotThrow(() ->
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart));
processor.process(candidate, fingerprint, outcome, context, attemptStart));
// No attempt written, no record created/updated
assertEquals(0, attemptRepo.savedAttempts.size(),
@@ -328,12 +328,12 @@ class DocumentProcessingCoordinatorTest {
// Make the unit of work throw
unitOfWorkPort.failOnExecute = true;
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
// Must not propagate the exception
assertDoesNotThrow(() ->
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart));
processor.process(candidate, fingerprint, outcome, context, attemptStart));
}
// -------------------------------------------------------------------------
@@ -345,10 +345,10 @@ class DocumentProcessingCoordinatorTest {
recordRepo.setLookupResult(new DocumentUnknown());
attemptRepo.nextAttemptNumber = 3; // Simulate 2 prior attempts
DocumentProcessingOutcome m3Outcome = new PreCheckPassed(
DocumentProcessingOutcome outcome = new PreCheckPassed(
candidate, new PdfExtractionSuccess("text", new PdfPageCount(1)));
processor.process(candidate, fingerprint, m3Outcome, context, attemptStart);
processor.process(candidate, fingerprint, outcome, context, attemptStart);
assertEquals(1, attemptRepo.savedAttempts.size());
assertEquals(3, attemptRepo.savedAttempts.get(0).attemptNumber(),

View File

@@ -635,24 +635,24 @@ class BatchRunProcessingUseCaseTest {
public void process(
de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate candidate,
de.gecheckt.pdf.umbenenner.domain.model.DocumentFingerprint fingerprint,
de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome m3Outcome,
de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome outcome,
de.gecheckt.pdf.umbenenner.domain.model.BatchRunContext context,
java.time.Instant attemptStart) {
processCallCount++;
// Delegate to super so the real logic runs (with no-op repos)
super.process(candidate, fingerprint, m3Outcome, context, attemptStart);
super.process(candidate, fingerprint, outcome, context, attemptStart);
}
@Override
public void processWithM3Execution(
public void processDeferredOutcome(
de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate candidate,
de.gecheckt.pdf.umbenenner.domain.model.DocumentFingerprint fingerprint,
de.gecheckt.pdf.umbenenner.domain.model.BatchRunContext context,
java.time.Instant attemptStart,
java.util.function.Function<de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate, de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome> m3Executor) {
java.util.function.Function<de.gecheckt.pdf.umbenenner.domain.model.SourceDocumentCandidate, de.gecheckt.pdf.umbenenner.domain.model.DocumentProcessingOutcome> pipelineExecutor) {
processCallCount++;
// Delegate to super so the real logic runs (with no-op repos)
super.processWithM3Execution(candidate, fingerprint, context, attemptStart, m3Executor);
super.processDeferredOutcome(candidate, fingerprint, context, attemptStart, pipelineExecutor);
}
int processCallCount() { return processCallCount; }