M1 Vollständiger Grundstand mit Build, Konfiguration, Tests und Smoke-Tests
This commit is contained in:
1
pdf-umbenenner-bootstrap/src/test/java/.gitkeep
Normal file
1
pdf-umbenenner-bootstrap/src/test/java/.gitkeep
Normal file
@@ -0,0 +1 @@
|
||||
# Keep directory
|
||||
@@ -0,0 +1,201 @@
|
||||
package de.gecheckt.pdf.umbenenner.bootstrap;
|
||||
|
||||
import de.gecheckt.pdf.umbenenner.adapter.inbound.cli.SchedulerBatchCommand;
|
||||
import de.gecheckt.pdf.umbenenner.application.config.InvalidStartConfigurationException;
|
||||
import de.gecheckt.pdf.umbenenner.application.config.StartConfiguration;
|
||||
import de.gecheckt.pdf.umbenenner.application.config.StartConfigurationValidator;
|
||||
import de.gecheckt.pdf.umbenenner.application.port.in.RunBatchProcessingUseCase;
|
||||
import de.gecheckt.pdf.umbenenner.application.port.out.ConfigurationPort;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
|
||||
import java.net.URI;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
/**
|
||||
* Unit tests for {@link BootstrapRunner}.
|
||||
* <p>
|
||||
* Tests cover the bootstrap orchestration behavior including success path,
|
||||
* invalid configuration handling, and unexpected failure handling.
|
||||
*/
|
||||
class BootstrapRunnerTest {
|
||||
|
||||
@TempDir
|
||||
Path tempDir;
|
||||
|
||||
@Test
|
||||
void run_returnsZeroOnSuccess() throws Exception {
|
||||
// Create a mock configuration port that returns valid config
|
||||
ConfigurationPort mockConfigPort = new MockConfigurationPort(tempDir, true);
|
||||
|
||||
// Create mock factories that return working components
|
||||
BootstrapRunner runner = new BootstrapRunner(
|
||||
() -> mockConfigPort,
|
||||
StartConfigurationValidator::new,
|
||||
port -> new MockRunBatchProcessingUseCase(true),
|
||||
useCase -> new SchedulerBatchCommand(useCase)
|
||||
);
|
||||
|
||||
int exitCode = runner.run();
|
||||
|
||||
assertEquals(0, exitCode, "Success path should return exit code 0");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_returnsTwoOnInvalidConfiguration() throws Exception {
|
||||
// Create a mock configuration port that returns valid config
|
||||
ConfigurationPort mockConfigPort = new MockConfigurationPort(tempDir, true);
|
||||
|
||||
// Create a custom validator that always throws InvalidStartConfigurationException
|
||||
StartConfigurationValidator failingValidator = new StartConfigurationValidator() {
|
||||
@Override
|
||||
public void validate(StartConfiguration config) {
|
||||
throw new InvalidStartConfigurationException("Simulated validation failure");
|
||||
}
|
||||
};
|
||||
|
||||
BootstrapRunner runner = new BootstrapRunner(
|
||||
() -> mockConfigPort,
|
||||
() -> failingValidator,
|
||||
port -> new MockRunBatchProcessingUseCase(true),
|
||||
useCase -> new SchedulerBatchCommand(useCase)
|
||||
);
|
||||
|
||||
int exitCode = runner.run();
|
||||
|
||||
assertEquals(2, exitCode, "Invalid configuration should return exit code 2");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_returnsTwoOnConfigurationLoadingFailure() {
|
||||
// Create a mock configuration port that throws IllegalStateException
|
||||
ConfigurationPort failingConfigPort = () -> {
|
||||
throw new IllegalStateException("Simulated configuration loading failure");
|
||||
};
|
||||
|
||||
BootstrapRunner runner = new BootstrapRunner(
|
||||
() -> failingConfigPort,
|
||||
StartConfigurationValidator::new,
|
||||
port -> new MockRunBatchProcessingUseCase(true),
|
||||
useCase -> new SchedulerBatchCommand(useCase)
|
||||
);
|
||||
|
||||
int exitCode = runner.run();
|
||||
|
||||
assertEquals(2, exitCode, "Configuration loading failure should return exit code 2");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_returnsOneOnUnexpectedException() {
|
||||
// Create a mock configuration port that throws a generic exception
|
||||
ConfigurationPort throwingConfigPort = () -> {
|
||||
throw new RuntimeException("Simulated unexpected failure");
|
||||
};
|
||||
|
||||
BootstrapRunner runner = new BootstrapRunner(
|
||||
() -> throwingConfigPort,
|
||||
StartConfigurationValidator::new,
|
||||
port -> new MockRunBatchProcessingUseCase(true),
|
||||
useCase -> new SchedulerBatchCommand(useCase)
|
||||
);
|
||||
|
||||
int exitCode = runner.run();
|
||||
|
||||
assertEquals(1, exitCode, "Unexpected exception should return exit code 1");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_returnsOneWhenCommandReturnsFalse() throws Exception {
|
||||
// Create a mock configuration port that returns valid config
|
||||
ConfigurationPort mockConfigPort = new MockConfigurationPort(tempDir, true);
|
||||
|
||||
// Create a use case that returns false
|
||||
RunBatchProcessingUseCase failingUseCase = () -> false;
|
||||
|
||||
BootstrapRunner runner = new BootstrapRunner(
|
||||
() -> mockConfigPort,
|
||||
StartConfigurationValidator::new,
|
||||
port -> failingUseCase,
|
||||
useCase -> new SchedulerBatchCommand(useCase)
|
||||
);
|
||||
|
||||
int exitCode = runner.run();
|
||||
|
||||
assertEquals(1, exitCode, "Command returning false should return exit code 1");
|
||||
}
|
||||
|
||||
@Test
|
||||
void run_withDefaultConstructor_usesRealImplementations() {
|
||||
// This test verifies that the default constructor creates a functional runner
|
||||
// We can't fully test it without actual config files, but we can verify instantiation
|
||||
BootstrapRunner runner = new BootstrapRunner();
|
||||
|
||||
assertNotNull(runner, "Default constructor should create a valid BootstrapRunner");
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock ConfigurationPort that returns a valid StartConfiguration.
|
||||
*/
|
||||
private static class MockConfigurationPort implements ConfigurationPort {
|
||||
private final Path tempDir;
|
||||
private final boolean shouldSucceed;
|
||||
|
||||
MockConfigurationPort(Path tempDir, boolean shouldSucceed) {
|
||||
this.tempDir = tempDir;
|
||||
this.shouldSucceed = shouldSucceed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StartConfiguration loadConfiguration() {
|
||||
if (!shouldSucceed) {
|
||||
throw new IllegalStateException("Mock configuration loading failed");
|
||||
}
|
||||
|
||||
try {
|
||||
Path sourceFolder = Files.createDirectory(tempDir.resolve("source"));
|
||||
Path targetFolder = Files.createDirectory(tempDir.resolve("target"));
|
||||
Path sqliteFile = Files.createFile(tempDir.resolve("db.sqlite"));
|
||||
Path promptTemplateFile = Files.createFile(tempDir.resolve("prompt.txt"));
|
||||
|
||||
return new StartConfiguration(
|
||||
sourceFolder,
|
||||
targetFolder,
|
||||
sqliteFile,
|
||||
URI.create("https://api.example.com"),
|
||||
"gpt-4",
|
||||
30,
|
||||
3,
|
||||
100,
|
||||
50000,
|
||||
promptTemplateFile,
|
||||
tempDir.resolve("lock.lock"),
|
||||
tempDir.resolve("logs"),
|
||||
"INFO",
|
||||
"test-api-key"
|
||||
);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Failed to create mock configuration", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock RunBatchProcessingUseCase that returns a configurable result.
|
||||
*/
|
||||
private static class MockRunBatchProcessingUseCase implements RunBatchProcessingUseCase {
|
||||
private final boolean shouldSucceed;
|
||||
|
||||
MockRunBatchProcessingUseCase(boolean shouldSucceed) {
|
||||
this.shouldSucceed = shouldSucceed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean execute() {
|
||||
return shouldSucceed;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,275 @@
|
||||
package de.gecheckt.pdf.umbenenner.bootstrap;
|
||||
|
||||
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
/**
|
||||
* AP-008: Executable JAR smoke tests for M1 target state verification.
|
||||
* <p>
|
||||
* These tests verify that the shaded executable JAR can be run via {@code java -jar}
|
||||
* and behaves correctly for both success and invalid configuration scenarios.
|
||||
* <p>
|
||||
* Tests are executed by the maven-failsafe-plugin after the package phase.
|
||||
* The *IT suffix ensures failsafe picks them up as integration tests.
|
||||
*/
|
||||
class ExecutableJarSmokeTestIT {
|
||||
|
||||
private static final String JAVA_EXECUTABLE = "java";
|
||||
private static final long PROCESS_TIMEOUT_MS = 30000;
|
||||
|
||||
/**
|
||||
* Success-path smoke test: verifies the JAR starts successfully with valid configuration.
|
||||
* <p>
|
||||
* Verifies:
|
||||
* - The shaded JAR file exists
|
||||
* - java -jar executes successfully
|
||||
* - Exit code is 0
|
||||
* - Configuration is loaded successfully
|
||||
* - Logging produces output
|
||||
* - Process ends in a controlled way
|
||||
* - No functional PDF processing occurs
|
||||
*/
|
||||
@Test
|
||||
void jar_startsSuccessfullyWithValidConfiguration(@TempDir Path workDir) throws Exception {
|
||||
// Create runtime fixtures in the temporary working directory
|
||||
Path configDir = Files.createDirectory(workDir.resolve("config"));
|
||||
Path sourceDir = Files.createDirectory(workDir.resolve("source"));
|
||||
Path targetDir = Files.createDirectory(workDir.resolve("target"));
|
||||
Path logsDir = Files.createDirectory(workDir.resolve("logs"));
|
||||
Path dbParent = Files.createDirectory(workDir.resolve("data"));
|
||||
Path promptDir = Files.createDirectory(workDir.resolve("config/prompts"));
|
||||
|
||||
Path sqliteFile = Files.createFile(dbParent.resolve("pdf-umbenenner.db"));
|
||||
Path promptTemplateFile = Files.createFile(promptDir.resolve("template.txt"));
|
||||
Files.writeString(promptTemplateFile, "Test prompt template for smoke test.");
|
||||
|
||||
// Write valid application.properties
|
||||
Path configFile = configDir.resolve("application.properties");
|
||||
String validConfig = """
|
||||
source.folder=%s
|
||||
target.folder=%s
|
||||
sqlite.file=%s
|
||||
api.baseUrl=http://localhost:8080/api
|
||||
api.model=gpt-4o-mini
|
||||
api.timeoutSeconds=30
|
||||
max.retries.transient=3
|
||||
max.pages=10
|
||||
max.text.characters=5000
|
||||
prompt.template.file=%s
|
||||
runtime.lock.file=%s/lock.pid
|
||||
log.directory=%s
|
||||
log.level=INFO
|
||||
api.key=test-api-key-for-smoke-test
|
||||
""".formatted(
|
||||
sourceDir.toAbsolutePath(),
|
||||
targetDir.toAbsolutePath(),
|
||||
sqliteFile.toAbsolutePath(),
|
||||
promptTemplateFile.toAbsolutePath(),
|
||||
workDir.toAbsolutePath(),
|
||||
logsDir.toAbsolutePath()
|
||||
);
|
||||
Files.writeString(configFile, validConfig);
|
||||
|
||||
// Find the shaded JAR - look in target directory relative to project root
|
||||
Path projectRoot = Paths.get(System.getProperty("user.dir"));
|
||||
Path bootstrapTarget = projectRoot.resolve("pdf-umbenenner-bootstrap/target");
|
||||
|
||||
if (!Files.exists(bootstrapTarget)) {
|
||||
// Fallback: try relative from current execution context
|
||||
bootstrapTarget = Paths.get("target");
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(bootstrapTarget), "Bootstrap target directory must exist: " + bootstrapTarget);
|
||||
|
||||
File[] jars = bootstrapTarget.toFile().listFiles((dir, name) ->
|
||||
name.endsWith(".jar") && !name.contains("original") && !name.contains("tests")
|
||||
);
|
||||
|
||||
assertNotNull(jars, "JAR files should exist in target directory");
|
||||
assertTrue(jars.length > 0, "At least one JAR should exist in target directory");
|
||||
|
||||
Path shadedJar = Paths.get(jars[0].getAbsolutePath());
|
||||
// Prefer the shaded JAR if multiple exist
|
||||
for (File jar : jars) {
|
||||
if (jar.getName().contains("shaded") || jar.getName().equals("pdf-umbenenner-bootstrap-0.0.1-SNAPSHOT.jar")) {
|
||||
shadedJar = jar.toPath().toAbsolutePath();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(shadedJar), "Shaded JAR file must exist: " + shadedJar);
|
||||
|
||||
// Build the java -jar command
|
||||
List<String> command = new ArrayList<>();
|
||||
command.add(JAVA_EXECUTABLE);
|
||||
command.add("-jar");
|
||||
command.add(shadedJar.toString());
|
||||
|
||||
// Run the process in the work directory where config/application.properties will be found
|
||||
ProcessBuilder pb = new ProcessBuilder(command);
|
||||
pb.directory(workDir.toFile());
|
||||
pb.redirectErrorStream(true);
|
||||
|
||||
System.out.println("[SMOKE-TEST] JAR path: " + shadedJar.toAbsolutePath());
|
||||
System.out.println("[SMOKE-TEST] Working directory: " + workDir.toAbsolutePath());
|
||||
System.out.println("[SMOKE-TEST] Command: " + String.join(" ", command));
|
||||
|
||||
Process process = pb.start();
|
||||
|
||||
// Wait for process completion with timeout
|
||||
boolean completed = process.waitFor(PROCESS_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
|
||||
assertTrue(completed, "Process should complete within " + PROCESS_TIMEOUT_MS + "ms timeout");
|
||||
|
||||
int exitCode = process.exitValue();
|
||||
|
||||
// Capture all output for diagnostic purposes
|
||||
byte[] outputBytes = process.getInputStream().readAllBytes();
|
||||
String outputText = new String(outputBytes);
|
||||
|
||||
System.out.println("[SMOKE-TEST] Exit code: " + exitCode);
|
||||
System.out.println("[SMOKE-TEST] Subprocess stdout/stderr:\n" + outputText);
|
||||
|
||||
assertEquals(0, exitCode, "Successful startup should return exit code 0. Output was: " + outputText);
|
||||
|
||||
// Verify logging output was produced (check console output)
|
||||
assertTrue(
|
||||
outputText.contains("Starting") ||
|
||||
outputText.contains("Bootstrap") ||
|
||||
outputText.contains("completed") ||
|
||||
outputText.contains("successfully"),
|
||||
"Output should contain startup/shutdown indicators. Got: " + outputText
|
||||
);
|
||||
|
||||
// Verify no unexpected artifacts were created beyond our fixtures
|
||||
// Count top-level entries - should only be our created directories
|
||||
long fileCount = Files.list(workDir).count();
|
||||
assertTrue(fileCount <= 7, "No extra files should be created beyond fixtures. Found: " + fileCount + " entries");
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalid-configuration smoke test: verifies controlled failure with exit code 2.
|
||||
* <p>
|
||||
* Verifies:
|
||||
* - java -jar runs against invalid configuration
|
||||
* - Exit code is 2
|
||||
* - Startup fails before any processing
|
||||
* - Failure is controlled (not a crash/hang)
|
||||
* - Error output indicates configuration validation failure
|
||||
*/
|
||||
@Test
|
||||
void jar_failsControlledWithInvalidConfiguration(@TempDir Path workDir) throws Exception {
|
||||
// Create runtime fixtures with INVALID configuration
|
||||
Path configDir = Files.createDirectory(workDir.resolve("config"));
|
||||
Path sourceDir = Files.createDirectory(workDir.resolve("source"));
|
||||
// Intentionally missing target folder - this should cause validation failure
|
||||
|
||||
Path dbParent = Files.createDirectory(workDir.resolve("data"));
|
||||
Path promptDir = Files.createDirectory(workDir.resolve("config/prompts"));
|
||||
|
||||
Path sqliteFile = Files.createFile(dbParent.resolve("pdf-umbenenner.db"));
|
||||
Path promptTemplateFile = Files.createFile(promptDir.resolve("template.txt"));
|
||||
Files.writeString(promptTemplateFile, "Test prompt template.");
|
||||
|
||||
// Write INVALID application.properties (missing required target.folder)
|
||||
Path configFile = configDir.resolve("application.properties");
|
||||
String invalidConfig = """
|
||||
source.folder=%s
|
||||
# target.folder is intentionally missing - should cause validation failure
|
||||
sqlite.file=%s
|
||||
api.baseUrl=http://localhost:8080/api
|
||||
api.model=gpt-4o-mini
|
||||
api.timeoutSeconds=30
|
||||
max.retries.transient=3
|
||||
max.pages=10
|
||||
max.text.characters=5000
|
||||
prompt.template.file=%s
|
||||
log.directory=%s/logs
|
||||
log.level=INFO
|
||||
api.key=test-api-key
|
||||
""".formatted(
|
||||
sourceDir.toAbsolutePath(),
|
||||
sqliteFile.toAbsolutePath(),
|
||||
promptTemplateFile.toAbsolutePath(),
|
||||
workDir.toAbsolutePath()
|
||||
);
|
||||
Files.writeString(configFile, invalidConfig);
|
||||
|
||||
// Find the shaded JAR
|
||||
Path projectRoot = Paths.get(System.getProperty("user.dir"));
|
||||
Path bootstrapTarget = projectRoot.resolve("pdf-umbenenner-bootstrap/target");
|
||||
|
||||
if (!Files.exists(bootstrapTarget)) {
|
||||
bootstrapTarget = Paths.get("target");
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(bootstrapTarget), "Bootstrap target directory must exist: " + bootstrapTarget);
|
||||
|
||||
File[] jars = bootstrapTarget.toFile().listFiles((dir, name) ->
|
||||
name.endsWith(".jar") && !name.contains("original") && !name.contains("tests")
|
||||
);
|
||||
|
||||
assertNotNull(jars, "JAR files should exist in target directory");
|
||||
assertTrue(jars.length > 0, "At least one JAR should exist");
|
||||
|
||||
Path shadedJar = Paths.get(jars[0].getAbsolutePath());
|
||||
for (File jar : jars) {
|
||||
if (jar.getName().contains("shaded") || jar.getName().equals("pdf-umbenenner-bootstrap-0.0.1-SNAPSHOT.jar")) {
|
||||
shadedJar = jar.toPath().toAbsolutePath();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(shadedJar), "Shaded JAR file must exist: " + shadedJar);
|
||||
|
||||
// Build the java -jar command
|
||||
List<String> command = new ArrayList<>();
|
||||
command.add(JAVA_EXECUTABLE);
|
||||
command.add("-jar");
|
||||
command.add(shadedJar.toString());
|
||||
|
||||
// Run the process
|
||||
ProcessBuilder pb = new ProcessBuilder(command);
|
||||
pb.directory(workDir.toFile());
|
||||
pb.redirectErrorStream(true);
|
||||
|
||||
System.out.println("[SMOKE-TEST-INVALID] JAR path: " + shadedJar.toAbsolutePath());
|
||||
System.out.println("[SMOKE-TEST-INVALID] Working directory: " + workDir.toAbsolutePath());
|
||||
System.out.println("[SMOKE-TEST-INVALID] Command: " + String.join(" ", command));
|
||||
|
||||
Process process = pb.start();
|
||||
|
||||
// Wait for process completion with timeout
|
||||
boolean completed = process.waitFor(PROCESS_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
|
||||
assertTrue(completed, "Process should complete within timeout even on failure");
|
||||
|
||||
int exitCode = process.exitValue();
|
||||
|
||||
// Capture all output for diagnostic purposes
|
||||
byte[] outputBytes = process.getInputStream().readAllBytes();
|
||||
String outputText = new String(outputBytes);
|
||||
|
||||
System.out.println("[SMOKE-TEST-INVALID] Exit code: " + exitCode);
|
||||
System.out.println("[SMOKE-TEST-INVALID] Subprocess stdout/stderr:\n" + outputText);
|
||||
|
||||
assertEquals(2, exitCode, "Invalid configuration should return exit code 2. Output was: " + outputText);
|
||||
|
||||
// Verify error output indicates configuration failure
|
||||
assertTrue(
|
||||
outputText.toLowerCase().contains("config") ||
|
||||
outputText.toLowerCase().contains("validation") ||
|
||||
outputText.toLowerCase().contains("invalid") ||
|
||||
outputText.toLowerCase().contains("error") ||
|
||||
outputText.toLowerCase().contains("failed"),
|
||||
"Output should indicate configuration/validation error. Got: " + outputText
|
||||
);
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user