From c9022cb09da3726af27a35191e2b087acf01faaf Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Wed, 21 Aug 2024 06:54:03 -0400
Subject: [PATCH 01/38] Set up some new values to reduce the amount of time
 spent running repeat tests, in an attempt to make leak detection easier.

Signed-off-by: Greg Schohn
---
 .../NettyLeakCheckTestExtensionTest.java      | 65 +++++++++++++++++++
 .../NettyLeakCheckTestExtension.java          | 47 +++++++++++---
 .../testutils/WrapWithNettyLeakDetection.java | 39 +++++++++--
 3 files changed, 135 insertions(+), 16 deletions(-)
 create mode 100644 testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java

diff --git a/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java b/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java
new file mode 100644
index 000000000..b6e45ebc0
--- /dev/null
+++ b/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java
@@ -0,0 +1,65 @@
+package org.opensearch.migrations.testutils;
+
+import java.time.Duration;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.parallel.Execution;
+import org.junit.jupiter.api.parallel.ExecutionMode;
+
+
+@Execution(ExecutionMode.SAME_THREAD)
+class NettyLeakCheckTestExtensionTest {
+
+    String testCase;
+    int counter;
+    long startTimeNanos;
+
+    @BeforeEach
+    public void beforeTest() {
+        testCase = null;
+        counter = 0;
+        startTimeNanos = System.nanoTime();
+    }
+
+    @AfterEach
+    public void afterTest() {
+        var observedTestDuration = Duration.ofNanos(System.nanoTime()-startTimeNanos);
+        switch (testCase) {
+            case "testMaxTimeSupercedesReps":
+                Assertions.assertTrue(counter < 20, "counter=" + counter);
+                Assertions.assertTrue(Duration.ofMillis(100).minus(observedTestDuration).isNegative());
+                break;
+            case "testMinTimeSupercedesReps":
+                Assertions.assertTrue(counter > 1, "counter=" + counter);
+                Assertions.assertTrue(Duration.ofMillis(100).minus(observedTestDuration).isNegative());
+                break;
+            default:
+                Assertions.fail("unknown test case: " + testCase);
+        }
+    }
+
+    @Test
+    @WrapWithNettyLeakDetection(maxRuntimeMillis = 100, repetitions = 20)
+    public void testMaxTimeSupercedesReps() throws Exception {
+        testCase = getMyMethodName();
+        ++counter;
+        Thread.sleep(10);
+    }
+
+    @Test
+    @WrapWithNettyLeakDetection(minRuntimeMillis = 100, repetitions = 1)
+    public void testMinTimeSupercedesReps() throws Exception {
+        testCase = getMyMethodName();
+        ++counter;
+        Thread.sleep(10);
+    }
+
+    private static String getMyMethodName() {
+        var backtrace = Thread.currentThread().getStackTrace();
+        return backtrace[2].getMethodName();
+    }
+}
\ No newline at end of file
diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtension.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtension.java
index 1dadf85bd..ac61e0fbd 100644
--- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtension.java
+++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtension.java
@@ -12,6 +12,7 @@
 import lombok.Lombok;
 
 public class NettyLeakCheckTestExtension implements
InvocationInterceptor { + public static final int DEFAULT_NUM_REPETITIONS = 16; private final boolean allLeakChecksAreDisabled; public NettyLeakCheckTestExtension() { @@ -23,19 +24,49 @@ private void wrapWithLeakChecks( Callable repeatCall, Callable finalCall ) throws Throwable { - if (allLeakChecksAreDisabled || getAnnotation(extensionContext).map(a -> a.disableLeakChecks()).orElse(false)) { + if (allLeakChecksAreDisabled || getAnnotation(extensionContext).map(WrapWithNettyLeakDetection::disableLeakChecks).orElse(false)) { CountingNettyResourceLeakDetector.deactivate(); finalCall.call(); return; } else { CountingNettyResourceLeakDetector.activate(); - int repetitions = getAnnotation(extensionContext).map(a -> a.repetitions()) + var repetitions = getAnnotation(extensionContext).map(WrapWithNettyLeakDetection::repetitions) .orElseThrow(() -> new IllegalStateException("No test method present")); + var minRuntimeMs = getAnnotation(extensionContext).map(WrapWithNettyLeakDetection::minRuntimeMillis) + .orElseThrow(() -> new IllegalStateException("No test method present")); + var maxRuntimeMs = getAnnotation(extensionContext).map(WrapWithNettyLeakDetection::maxRuntimeMillis) + .orElseThrow(() -> new IllegalStateException("No test method present")); + if (repetitions == -1 && + minRuntimeMs == -1 && + maxRuntimeMs == -1) { + repetitions = DEFAULT_NUM_REPETITIONS; + } + assert minRuntimeMs <= 0 || maxRuntimeMs <= 0 || minRuntimeMs <= maxRuntimeMs : + "expected maxRuntime to be >= minRuntime"; - for (int i = 0; i < repetitions; i++) { - ((i == repetitions - 1) ? finalCall : repeatCall).call(); + long nanosSpent = 0; + for (int runNumber = 1; ; runNumber++) { + var startTimeNanos = System.nanoTime(); + boolean lastRun = false; + { + var timeSpentMs = nanosSpent / (1000 * 1000); + if (repetitions >= 0) { + lastRun = runNumber >= repetitions; + } + if (minRuntimeMs > 0) { + lastRun = timeSpentMs >= minRuntimeMs; + } + if (maxRuntimeMs > 0 && !lastRun) { + lastRun = timeSpentMs >= maxRuntimeMs; + } + } + (lastRun ? 
finalCall : repeatCall).call(); + nanosSpent += (System.nanoTime() - startTimeNanos); System.gc(); System.runFinalization(); + if (lastRun) { + break; + } } Assertions.assertEquals(0, CountingNettyResourceLeakDetector.getNumLeaks()); @@ -76,11 +107,9 @@ public void interceptTestTemplateMethod( var selfInstance = invocationContext.getTarget() .orElseThrow(() -> new IllegalStateException("Target instance not found")); wrapWithLeakChecks(extensionContext, () -> { - { - Method m = invocationContext.getExecutable(); - m.setAccessible(true); - return m.invoke(selfInstance, invocationContext.getArguments().toArray()); - } + Method m = invocationContext.getExecutable(); + m.setAccessible(true); + return m.invoke(selfInstance, invocationContext.getArguments().toArray()); }, () -> wrapProceed(invocation)); } diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java index fd8a9bc8e..e2d757b55 100644 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java @@ -4,22 +4,47 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.time.Duration; import org.junit.jupiter.api.extension.ExtendWith; +/** + * This annotation causes a test to be run within the NettyLeakCheckTestExtension wrapper. + * That will run a test multiple times with a CountingNettyResourceLeakDetector set as the + * ByteBuf allocator to detect memory leaks.

+ * + * Some leaks might need to put a bit more stress on the GC for objects to get cleared out + * and trigger potential checks within any resource finalizers to determine if there have + * been leaks. This could also be used to make leaks more obvious as the test environment + * itself will have many resources and looking for just one rogue ByteBufHolder in the hmap + * file could be difficult.

+ * + * In case min/max values for repetitions and runtime contradict each other, the test will + * run enough times to meet the minimum requirements even if the max repetitions or runtime + * is surpassed. + */ @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) @ExtendWith(NettyLeakCheckTestExtension.class) public @interface WrapWithNettyLeakDetection { /** - * Some leaks might need to put a bit more stress on the GC for objects to get cleared out - * and trigger potential checks within any resource finalizers to determine if there have - * been leaks. This could also be used to make leaks more obvious as the test environment - * itself will have many resources and looking for just one rogue ByteBufHolder in an hmap - * file could be difficult. - * @return + * How many repetitions the test should run, provided that it hasn't gone over the maxRuntime (if specified) + * and has run enough times to meet the minRuntime (if specified) */ - int repetitions() default 16; + int repetitions() default -1; + + /** + * Like repetitions this is a guesstimate to be provided to make sure that a test will + * put enough ByteBuf pressure and activity to trigger exceptions and be useful in dumps. + * This may take precedence over a repetitions value that is otherwise too small. + */ + long minRuntimeMillis() default -1; + /** + * Like repetitions this is a guesstimate to be provided to make sure that a test will + * put enough ByteBuf pressure and activity to trigger exceptions and be useful in dumps. + * This may take precedence over a repetitions value that is too large. + */ + long maxRuntimeMillis() default -1; /** * Set this to true to disable running any netty leak checks. This will cause the test to be From 607da8e801d8db16cd0f299e16274aa798e0ed79 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 27 Aug 2024 09:09:24 -0400 Subject: [PATCH 02/38] Spotless cleanup Signed-off-by: Greg Schohn --- .../migrations/testutils/NettyLeakCheckTestExtensionTest.java | 3 +-- .../migrations/testutils/WrapWithNettyLeakDetection.java | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java b/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java index b6e45ebc0..30d301ec4 100644 --- a/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java +++ b/testHelperFixtures/src/test/java/org/opensearch/migrations/testutils/NettyLeakCheckTestExtensionTest.java @@ -1,7 +1,6 @@ package org.opensearch.migrations.testutils; import java.time.Duration; -import java.util.concurrent.atomic.AtomicInteger; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; @@ -62,4 +61,4 @@ private static String getMyMethodName() { var backtrace = Thread.currentThread().getStackTrace(); return backtrace[2].getMethodName(); } -} \ No newline at end of file +} diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java index e2d757b55..2e2a2e181 100644 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/WrapWithNettyLeakDetection.java @@ -4,7 +4,6 @@ import 
java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import java.time.Duration; import org.junit.jupiter.api.extension.ExtendWith; From eeeb8ba955083ff629ff032a23d35918676ce54d Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 9 Sep 2024 23:50:26 -0400 Subject: [PATCH 03/38] Speedup test time by marking more tests as longTests and reducing repeat counts for some tests Signed-off-by: Greg Schohn --- .../replay/ResultsToLogsConsumerTest.java | 3 +++ .../migrations/replay/TrafficReplayerTest.java | 1 + .../replay/datahandlers/JsonAccumulatorTest.java | 15 +++++++++++++-- .../NettyPacketToHttpConsumerTest.java | 2 ++ .../http/HttpJsonTransformingConsumerTest.java | 10 ++++++++++ .../replay/e2etests/FullTrafficReplayerTest.java | 2 +- .../replay/TestCapturePacketToHttpHandler.java | 9 +++++---- 7 files changed, 35 insertions(+), 7 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java index 930118c23..610c63baa 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ResultsToLogsConsumerTest.java @@ -15,6 +15,7 @@ import org.apache.logging.log4j.core.impl.Log4jContextFactory; import org.apache.logging.log4j.core.selector.ClassLoaderContextSelector; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.parallel.ResourceLock; @@ -116,6 +117,7 @@ private static byte[] loadResourceAsBytes(String path) throws IOException { } @Test + @Tag("longTest") @ResourceLock("TestContext") public void testOutputterForGet() throws IOException { final String EXPECTED_LOGGED_OUTPUT = "" @@ -174,6 +176,7 @@ public void testOutputterForGet() throws IOException { } @Test + @Tag("longTest") @ResourceLock("TestContext") public void testOutputterForPost() throws IOException { final String EXPECTED_LOGGED_OUTPUT = "" diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 5fb501c5f..2d9f82415 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -183,6 +183,7 @@ static byte[] synthesizeTrafficStreamsIntoByteArray(Instant timestamp, int numSt } @Test + @WrapWithNettyLeakDetection(repetitions = 1) public void testReader() throws Exception { var uri = new URI("http://localhost:9200"); try ( diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/JsonAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/JsonAccumulatorTest.java index cc9683e1c..37d157f15 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/JsonAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/JsonAccumulatorTest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import 
org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; @@ -69,13 +70,24 @@ byte[] getData(String key) throws IOException { @ParameterizedTest @CsvSource({ "tiny,2", - "tiny,20000", + "tiny,20000"}) + public void testAccumulationShort(String dataName, int chunkBound) throws IOException { + testAccumulation(dataName, chunkBound); + } + + @ParameterizedTest + @Tag("longTest") + @CsvSource({ "medium,2", "medium,20000", "large,2", "large,20000", "largeAndPacked,2", "largeAndPacked,20000" }) + public void testAccumulationLong(String dataName, int chunkBound) throws IOException { + testAccumulation(dataName, chunkBound); + } + public void testAccumulation(String dataName, int chunkBound) throws IOException { var testFileBytes = getData(dataName); var outputJson = readJson(testFileBytes, 2); @@ -84,6 +96,5 @@ public void testAccumulation(String dataName, int chunkBound) throws IOException var jacksonParsedRoundTripped = mapper.writeValueAsString(mapper.readTree(testFileBytes)); var jsonAccumParsedRoundTripped = mapper.writeValueAsString(outputJson); Assertions.assertEquals(jacksonParsedRoundTripped, jsonAccumParsedRoundTripped); - } } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index a787f52b7..04365af74 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -396,6 +396,7 @@ public void testMetricCountsFor_testThatConnectionsAreKeptAliveAndShared(boolean } @ParameterizedTest + @Tag("longTest") @CsvSource({ "false", "true" }) public void testResponseTakesLongerThanTimeout(boolean useTls) throws Exception { var responseTimeout = Duration.ofMillis(50); @@ -447,6 +448,7 @@ public void testResponseTakesLongerThanTimeout(boolean useTls) throws Exception } @ParameterizedTest + @Tag("longTest") @CsvSource({ "false", "true" }) public void testTimeBetweenRequestsLongerThanResponseTimeout(boolean useTls) throws Exception { var responseTimeout = Duration.ofMillis(100); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java index 838e73cb1..3774f64e7 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/http/HttpJsonTransformingConsumerTest.java @@ -11,6 +11,7 @@ import java.util.stream.Stream; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -48,8 +49,17 @@ private static Stream provideTestParameters() { ); } + @Test + @WrapWithNettyLeakDetection(repetitions = 2) + public void testSomeRequestProcessing() throws Exception { + var args = provideTestParameters().findFirst().get(); + 
testRequestProcessing((Integer) args.get()[0], (Boolean) args.get()[1], (String) args.get()[2]); + } + @ParameterizedTest @MethodSource("provideTestParameters") + @Tag("longTest") + @WrapWithNettyLeakDetection(repetitions = 2) public void testRequestProcessing(Integer attemptedChunks, Boolean hostTransformation, String requestFile) throws Exception { final var dummyAggregatedResponse = new AggregatedRawResponse(null, 17, Duration.ZERO, List.of(), null); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java index e159ddee6..eb31a7762 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java @@ -316,8 +316,8 @@ public CommitResult commitTrafficStream(ITrafficStreamKey trafficStreamKey) thro } @Test + @Tag("longTest") public void makeSureThatCollateralDamageDoesntFreezeTests() throws Throwable { - var imposterThread = new Thread(() -> { while (true) { try { diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestCapturePacketToHttpHandler.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestCapturePacketToHttpHandler.java index 2311de00c..fd2e32857 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestCapturePacketToHttpHandler.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestCapturePacketToHttpHandler.java @@ -45,19 +45,20 @@ public TestCapturePacketToHttpHandler( @Override public TrackedFuture consumeBytes(ByteBuf nextRequestPacket) { numConsumes.incrementAndGet(); - log.info("incoming buffer refcnt=" + nextRequestPacket.refCnt()); + log.atDebug().setMessage(()->"incoming buffer refcnt=" + nextRequestPacket.refCnt()).log(); var duplicatedPacket = nextRequestPacket.retainedDuplicate(); return new TrackedFuture<>(CompletableFuture.runAsync(() -> { try { - log.info("Running async future for " + nextRequestPacket); + log.atDebug().setMessage(()->"Running async future for " + nextRequestPacket).log(); Thread.sleep(consumeDuration.toMillis()); - log.info("woke up from sleeping for " + nextRequestPacket); + log.atDebug().setMessage(()->"woke up from sleeping for " + nextRequestPacket).log(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Lombok.sneakyThrow(e); } try { - log.info("At the time of committing the buffer, refcnt=" + duplicatedPacket.refCnt()); + log.atDebug() + .setMessage(()->"At the time of committing the buffer, refcnt=" + duplicatedPacket.refCnt()).log(); duplicatedPacket.readBytes(byteArrayOutputStream, nextRequestPacket.readableBytes()); duplicatedPacket.release(); } catch (IOException e) { From 4db8e523a08e2654ad5b3d6256fd335fb9ecf531 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 10 Sep 2024 08:40:12 -0400 Subject: [PATCH 04/38] Make all gradle projects run in parallel. 
Signed-off-by: Greg Schohn
---
 gradle.properties | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/gradle.properties b/gradle.properties
index 131ae1c2b..9de85ba77 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,2 +1,7 @@
 org.gradle.caching=true
+org.gradle.configuration-cache=true
+org.gradle.configureondemand=true
+# Set Gradle Daemon's idle timeout to 30 minutes
+org.gradle.daemon.idletimeout=1800000
 org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
+org.gradle.parallel=true

From c5d747e2389ec64dffef4b2c1b8122f58edd9436 Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Tue, 10 Sep 2024 17:02:15 -0400
Subject: [PATCH 05/38] Quiet down proxy server unit tests.

Reduced test logging level to INFO and shut down the event loop for a test
that had been letting it run amok, littering actions and log messages for
the duration of the process.

Signed-off-by: Greg Schohn
---
 .../proxyserver/netty/ExpiringSubstitutableItemPoolTest.java | 2 ++
 .../src/test/resources/log4j2.properties                     | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
index 89f350ee8..b3169cc60 100644
--- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
+++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
@@ -157,6 +157,8 @@ void get() throws Exception {
         Assertions.assertTrue(
             pool.getStats().averageWaitTime().toMillis() < pool.getStats().averageBuildTime().toMillis()
         );
+
+        eventLoop.shutdownGracefully().sync();
     }
 
     private static Integer getNextItem(ExpiringSubstitutableItemPool<Future<Integer>, Integer> pool)
diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties b/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties
index 702836711..59caa9889 100644
--- a/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties
+++ b/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties
@@ -6,6 +6,6 @@ appender.console.target = SYSTEM_ERR
 appender.console.layout.type = PatternLayout
 appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} [%t] %c{1} - %msg%equals{ ctx=%mdc}{ ctx=\{\}}{}%n
 
-rootLogger.level = debug
+rootLogger.level = info
 rootLogger.appenderRefs = stderr
 rootLogger.appenderRef.stderr.ref = STDERR

From 1d32cb3091494df11ab5f1de68cb821431b7235c Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Tue, 10 Sep 2024 22:06:45 -0400
Subject: [PATCH 06/38] More parallelization of the build plus an
 "isolatedTest" tag and targets so that those tests can run in isolation from
 any other tests.
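
To make the intended usage concrete, here is a minimal sketch (the class and
method names are hypothetical; only the "isolatedTest" tag value and the task
wiring in the diff below are part of this change). A test that cannot share a
JVM or host resources with other tests opts in with JUnit 5's @Tag, and the
new isolatedTest Gradle task selects it while the regular test and slowTest
tasks exclude it:

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    // Hypothetical example: runs only via `gradlew isolatedTest`, which forks
    // one test process at a time, after the shared-process suites have run.
    @Tag("isolatedTest")
    class ExclusivePortBindingTest {
        @Test
        void bindsFixedPortsWithoutContention() {
            // ... exercise fixed ports or containers that concurrently
            // running tests would otherwise disturb
        }
    }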
Signed-off-by: Greg Schohn --- .../rfs/ParallelDocumentMigrationsTest.java | 2 +- .../opensearch/migrations/EndToEndTest.java | 2 +- .../rfs/common/LuceneDocumentsReaderTest.java | 2 + TrafficCapture/README.md | 2 +- .../KafkaConfigurationCaptureProxyTest.java | 2 + .../replay/TrafficReplayerTest.java | 1 + .../NettyPacketToHttpConsumerTest.java | 2 +- .../e2etests/FullTrafficReplayerTest.java | 4 +- .../KafkaRestartingTrafficReplayerTest.java | 2 +- .../replay/http/retries/HttpRetryTest.java | 2 +- ...KafkaTrafficCaptureSourceLongTermTest.java | 2 +- build.gradle | 44 ++++++++++++++++++- 12 files changed, 56 insertions(+), 11 deletions(-) diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java index 093f6360f..4304b556d 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java @@ -33,7 +33,7 @@ import lombok.Lombok; import lombok.extern.slf4j.Slf4j; -@Tag("longTest") +@Tag("isolatedTest") @Slf4j public class ParallelDocumentMigrationsTest extends SourceTestBase { diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/EndToEndTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/EndToEndTest.java index 9fd72c870..b77a9329d 100644 --- a/MetadataMigration/src/test/java/org/opensearch/migrations/EndToEndTest.java +++ b/MetadataMigration/src/test/java/org/opensearch/migrations/EndToEndTest.java @@ -29,7 +29,7 @@ /** * Tests focused on setting up whole source clusters, performing a migration, and validation on the target cluster */ -@Tag("longTest") +@Tag("isolatedTest") @Slf4j class EndToEndTest { diff --git a/RFS/src/test/java/com/rfs/common/LuceneDocumentsReaderTest.java b/RFS/src/test/java/com/rfs/common/LuceneDocumentsReaderTest.java index 99286b27e..50003f205 100644 --- a/RFS/src/test/java/com/rfs/common/LuceneDocumentsReaderTest.java +++ b/RFS/src/test/java/com/rfs/common/LuceneDocumentsReaderTest.java @@ -25,6 +25,7 @@ import org.apache.lucene.util.BytesRef; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -133,6 +134,7 @@ public void ReadDocuments_AsExpected(Snapshot snapshot, Version version) throws } @Test + @Tag("isolatedTest") void testParallelReading() throws Exception { // Create a mock IndexReader with multiple leaves (segments) int numSegments = 10; diff --git a/TrafficCapture/README.md b/TrafficCapture/README.md index bcf4e21d7..943633cfd 100644 --- a/TrafficCapture/README.md +++ b/TrafficCapture/README.md @@ -154,7 +154,7 @@ This command compiles the source code and runs the quick unit tests, ensuring th For a comprehensive test run, including both quick unit tests and more extensive slow tests, execute: ```sh -../gradlew test slowTest --rerun +../gradlew allTests --rerun ``` This command initiates all tests, ensuring thorough validation of the project. The `--rerun` option is used to ignore existing task output cache for the specified tasks. 
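
As an aside, a minimal sketch of the tagging pattern that this quick/slow
split relies on (class and method names here are hypothetical; the arrangement
mirrors the JsonAccumulatorTest change above): untagged tests run in the
default `test` task, a `@Tag("longTest")` variant is routed to `slowTest`, and
`allTests` covers both.

    import org.junit.jupiter.api.Tag;
    import org.junit.jupiter.api.Test;

    class WorkloadSplitTest { // hypothetical
        @Test
        void quickVariant() { runScenario(2); }           // runs with `gradlew test`

        @Test
        @Tag("longTest")                                  // runs with `gradlew slowTest`
        void exhaustiveVariant() { runScenario(20_000); }

        private void runScenario(int chunkBound) {
            // ... shared test body, parameterized by workload size
        }
    }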
diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/KafkaConfigurationCaptureProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/KafkaConfigurationCaptureProxyTest.java index a7895a78e..98e85baac 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/KafkaConfigurationCaptureProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/KafkaConfigurationCaptureProxyTest.java @@ -11,6 +11,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.function.ThrowingConsumer; import org.junit.jupiter.params.ParameterizedTest; @@ -32,6 +33,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; @Slf4j +@Tag("isolatedTest") @KafkaContainerTest @HttpdContainerTest @ToxiproxyContainerTest diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java index 2d9f82415..dd23317e9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/TrafficReplayerTest.java @@ -257,6 +257,7 @@ public void onTrafficStreamIgnored( @Test @Tag("longTest") + @WrapWithNettyLeakDetection(repetitions = 2) public void testCapturedReadsAfterCloseAreHandledAsNew() throws Exception { var uri = new URI("http://localhost:9200"); try ( diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index 04365af74..6ad0aa168 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -195,7 +195,6 @@ public void testHttpResponseIsSuccessfullyCaptured(boolean useTls, boolean large @CsvSource({ "false, false", "false, true", "true, false", "true, true" }) @Tag("longTest") @WrapWithNettyLeakDetection(repetitions = 1) - @Tag("longTest") public void testThatPeerResetTriggersFinalizeFuture(boolean useTls, boolean withServerReadTimeout) throws Exception { final var RESPONSE_TIMEOUT_FOR_HUNG_TEST = Duration.ofMillis(500); @@ -285,6 +284,7 @@ private void testPeerResets( @ParameterizedTest @CsvSource({ "false, false", "false, true", "true, false", "true, true" }) + @WrapWithNettyLeakDetection(repetitions = 1) @Tag("longTest") public void testThatConnectionsAreKeptAliveAndShared(boolean useTls, boolean largeResponse) throws Exception { try ( diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java index eb31a7762..5b878a287 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java @@ -234,7 +234,7 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { @Test @ResourceLock("TrafficReplayerRunner") - @Tag("longTest") + @Tag("isolatedTest") public void fullTestWithThrottledStart() throws Throwable { var random = new Random(1); try ( @@ -357,7 +357,7 @@ public void makeSureThatCollateralDamageDoesntFreezeTests() throws Throwable { @ParameterizedTest @CsvSource(value = { "3,false", "-1,false", "3,true", "-1,true", }) - @Tag("longTest") + @Tag("isolatedTest") @ResourceLock("TrafficReplayerRunner") public void fullTestWithRestarts(int testSize, boolean randomize) throws Throwable { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java index 77ad252b0..4298552e5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java @@ -90,7 +90,7 @@ public Consumer get() { @ParameterizedTest @CsvSource(value = { "3,false", "-1,false", "3,true", "-1,true", }) - @Tag("longTest") + @Tag("isolatedTest") @ResourceLock("TrafficReplayerRunner") public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java index c601597bb..a77c41bb3 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java @@ -149,8 +149,8 @@ public void testPersistentRequestFailuresAreRetriedThenFailed() throws Exception } } - @Tag("longTest") @Test + @Tag("longTest") @WrapWithNettyLeakDetection(disableLeakChecks = true) // code is forcibly terminated so leaks are expected public void testConnectionFailuresNeverGiveUp() throws Exception { URI serverUri; diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java index 2a824cb33..8aef4ed69 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/kafka/KafkaTrafficCaptureSourceLongTermTest.java @@ -36,7 +36,7 @@ public class KafkaTrafficCaptureSourceLongTermTest extends InstrumentationTest { ); @Test - @Tag("longTest") + @Tag("isolatedTest") public void testTrafficCaptureSource() throws Exception { String testTopicName = "TEST_TOPIC"; diff --git a/build.gradle b/build.gradle index 607e668bd..9840896f0 100644 --- a/build.gradle +++ b/build.gradle @@ -71,6 +71,7 @@ subprojects { 
        showStackTraces true
        showStandardStreams = true
    }
+   maxParallelForks = gradle.startParameter.maxWorkerCount
 
    // Provide way to exclude particular tests from CLI
    // e.g. ../gradlew test -PexcludeTests=**/KafkaProtobufConsumerLongTermTest*
@@ -84,21 +85,31 @@ subprojects {
        systemProperty 'log4j2.contextSelector', 'org.apache.logging.log4j.core.selector.BasicContextSelector'
        // Verify assertions in tests
        jvmArgs '-ea'
+       jacoco.enabled = true
    }
 
    // Mutually exclusive tests to avoid duplication
    tasks.named('test') {
        useJUnitPlatform {
            excludeTags 'longTest'
+           excludeTags 'isolatedTest'
        }
-       jacoco.enabled = true
    }
 
    tasks.register('slowTest', Test) {
        useJUnitPlatform {
            includeTags 'longTest'
+           excludeTags 'isolatedTest'
+       }
+   }
+
+   tasks.register('isolatedTest', Test) {
+       dependsOn test
+       dependsOn slowTest
+       maxParallelForks = 1
+       useJUnitPlatform {
+           includeTags 'isolatedTest'
        }
-       jacoco.enabled = true
    }
 
    task javadocJar(type: Jar, dependsOn: javadoc) {
        archiveClassifier.set('javadoc')
        from javadoc.destinationDir
    }
@@ -200,6 +211,35 @@ subprojects {
    }
 }
 
+gradle.projectsEvaluated {
+    List isolatedTestsTasks = []
+    List sharedProcessTestsTasks = []
+    subprojects { subproject ->
+        subproject.tasks.withType(Test).all { task ->
+            if (task.name == "isolatedTest") {
+                isolatedTestsTasks.add(task)
+            } else {
+                sharedProcessTestsTasks.add(task)
+            }
+        }
+    }
+    isolatedTestsTasks.sort { task -> task.project.name }
+
+    // Create a sequential dependency chain
+    Task previousTask = null
+    isolatedTestsTasks.each { task ->
+        sharedProcessTestsTasks.forEach { task.mustRunAfter(it) }
+        if (previousTask != null) {
+            task.mustRunAfter(previousTask)
+        }
+        previousTask = task
+    }
+
+    tasks.register("allTests") {
+        dependsOn isolatedTestsTasks
+    }
+}
+
 task mergeJacocoReports(type: JacocoReport) {
    def jacocoReportTasks = subprojects.collect { it.tasks.withType(JacocoReport).matching { it.name == "jacocoTestReport" } }.flatten()
    dependsOn jacocoReportTasks

From e6214bf87e54c481101af4deb94cc3acf246f124 Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Wed, 11 Sep 2024 14:28:21 -0400
Subject: [PATCH 07/38] Classify as isolated a few more tests that were having
 issues when run concurrently.

Set excludeTags in one call rather than two; with two, I think one was
overwriting the other.
Signed-off-by: Greg Schohn --- .../src/test/java/com/rfs/PerformanceVerificationTest.java | 2 ++ RFS/gradle.properties | 6 ------ RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java | 2 ++ TrafficCapture/gradle.properties | 7 ------- .../replay/e2etests/FullReplayerWithTracingChecksTest.java | 3 ++- build.gradle | 3 +-- 6 files changed, 7 insertions(+), 16 deletions(-) delete mode 100644 RFS/gradle.properties delete mode 100644 TrafficCapture/gradle.properties diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/PerformanceVerificationTest.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/PerformanceVerificationTest.java index 1d1d26e24..fe0e505ca 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/PerformanceVerificationTest.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/PerformanceVerificationTest.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.util.BytesRef; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.opensearch.migrations.reindexer.tracing.IDocumentMigrationContexts; @@ -37,6 +38,7 @@ public class PerformanceVerificationTest { @Test + @Tag("isolatedTest") void testDocumentBuffering() throws Exception { // Create an in-memory directory for the test ByteBuffersDirectory inMemoryDir = new ByteBuffersDirectory(); diff --git a/RFS/gradle.properties b/RFS/gradle.properties deleted file mode 100644 index 18f452c73..000000000 --- a/RFS/gradle.properties +++ /dev/null @@ -1,6 +0,0 @@ -# This file was generated by the Gradle 'init' task. -# https://docs.gradle.org/current/userguide/build_environment.html#sec:gradle_configuration_properties - -org.gradle.parallel=true -org.gradle.caching=true - diff --git a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java index 0492f9ac5..e2c594c70 100644 --- a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java +++ b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java @@ -13,6 +13,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; @@ -117,6 +118,7 @@ public void testAcquireLeaseHasNoUnnecessaryConflicts() throws Exception { } @Test + @Tag("isolatedTest") public void testAcquireLeaseForQuery() throws Exception { var testContext = WorkCoordinationTestContext.factory().withAllTracking(); final var NUM_DOCS = 40; diff --git a/TrafficCapture/gradle.properties b/TrafficCapture/gradle.properties deleted file mode 100644 index 5ca1b8ea9..000000000 --- a/TrafficCapture/gradle.properties +++ /dev/null @@ -1,7 +0,0 @@ -org.gradle.caching=true -org.gradle.configuration-cache=true -org.gradle.configureondemand=true - -# Set Gradle Daemon's idle timeout to 30 minutes -org.gradle.daemon.idletimeout=1800000 -org.gradle.parallel=true \ No newline at end of file diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java index 3be4b095c..84d1715e9 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java +++ 
b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java
@@ -52,7 +52,8 @@ protected TestContext makeInstrumentationContext() {
     @ParameterizedTest
     @ValueSource(ints = { 1, 2 })
     @ResourceLock("TrafficReplayerRunner")
-    @Tag("longTest")
+    // run in isolation to reduce the chance that there's a broken connection, upsetting the tcpConnection count check
+    @Tag("isolatedTest")
     public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable {
         var random = new Random(1);
         try (
diff --git a/build.gradle b/build.gradle
index 9840896f0..d4b837651 100644
--- a/build.gradle
+++ b/build.gradle
@@ -91,8 +91,7 @@ subprojects {
    // Mutually exclusive tests to avoid duplication
    tasks.named('test') {
        useJUnitPlatform {
-           excludeTags 'longTest'
-           excludeTags 'isolatedTest'
+           excludeTags('longTest', 'isolatedTest')
        }
    }

From 10cf0296507dbad45e6f5fbc9df6149d9778dc9e Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Wed, 11 Sep 2024 15:56:09 -0400
Subject: [PATCH 08/38] Remove configure on demand from gradle properties.

Not sure what it does; it's marked as experimental. When it's on, I've
noticed that local builds aren't rebuilt with changes.

Signed-off-by: Greg Schohn
---
 gradle.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gradle.properties b/gradle.properties
index 9de85ba77..a7d29129d 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,6 +1,6 @@
 org.gradle.caching=true
 org.gradle.configuration-cache=true
-org.gradle.configureondemand=true
+#org.gradle.configureondemand=true
 # Set Gradle Daemon's idle timeout to 30 minutes
 org.gradle.daemon.idletimeout=1800000
 org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8

From 54822a9c3ca5e47e4d5b2fa773682c1883318a9b Mon Sep 17 00:00:00 2001
From: Greg Schohn
Date: Wed, 11 Sep 2024 23:36:03 -0400
Subject: [PATCH 09/38] Run all full traffic tests as longTests (slowTest)
 instead of isolated.
Signed-off-by: Greg Schohn --- .../replay/e2etests/FullReplayerWithTracingChecksTest.java | 2 +- .../migrations/replay/e2etests/FullTrafficReplayerTest.java | 4 ++-- .../replay/e2etests/KafkaRestartingTrafficReplayerTest.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java index 84d1715e9..4d7f7d760 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullReplayerWithTracingChecksTest.java @@ -53,7 +53,7 @@ protected TestContext makeInstrumentationContext() { @ValueSource(ints = { 1, 2 }) @ResourceLock("TrafficReplayerRunner") // run in isolation to reduce the chance that there's a broken connection, upsetting the tcpConnection count check - @Tag("isolatedTest") + @Tag("longTest") public void testStreamWithRequestsWithCloseIsCommittedOnce(int numRequests) throws Throwable { var random = new Random(1); try ( diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java index 5b878a287..b495293d5 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java @@ -234,7 +234,7 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { @Test @ResourceLock("TrafficReplayerRunner") - @Tag("isolatedTest") + @Tag("slowTest") public void fullTestWithThrottledStart() throws Throwable { var random = new Random(1); try ( @@ -357,7 +357,7 @@ public void makeSureThatCollateralDamageDoesntFreezeTests() throws Throwable { @ParameterizedTest @CsvSource(value = { "3,false", "-1,false", "3,true", "-1,true", }) - @Tag("isolatedTest") + @Tag("slowTest") @ResourceLock("TrafficReplayerRunner") public void fullTestWithRestarts(int testSize, boolean randomize) throws Throwable { diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java index 4298552e5..77ad252b0 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/KafkaRestartingTrafficReplayerTest.java @@ -90,7 +90,7 @@ public Consumer get() { @ParameterizedTest @CsvSource(value = { "3,false", "-1,false", "3,true", "-1,true", }) - @Tag("isolatedTest") + @Tag("longTest") @ResourceLock("TrafficReplayerRunner") public void fullTest(int testSize, boolean randomize) throws Throwable { var random = new Random(1); From 527ec4e1e3704878c856fc075f4ba5f5790cbf39 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 12 Sep 2024 08:04:17 -0400 Subject: [PATCH 10/38] Decouple the test and slowTests as dependencies for isolatedTests. 
isolatedTests should still run after any of those other test tasks that are to be run, but the other tests are no longer required. For a given project, to run all types of tests, there's a new fullTest target. The global allTests target is still present to run all tests across all projects. Signed-off-by: Greg Schohn --- build.gradle | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index d4b837651..ecde254a7 100644 --- a/build.gradle +++ b/build.gradle @@ -103,14 +103,18 @@ subprojects { } tasks.register('isolatedTest', Test) { - dependsOn test - dependsOn slowTest maxParallelForks = 1 useJUnitPlatform { includeTags 'isolatedTest' } } + tasks.register('fullTest') { + dependsOn test + dependsOn slowTest + dependsOn isolatedTest + } + task javadocJar(type: Jar, dependsOn: javadoc) { archiveClassifier.set('javadoc') from javadoc.destinationDir @@ -235,6 +239,7 @@ gradle.projectsEvaluated { } tasks.register("allTests") { + dependsOn sharedProcessTestsTasks dependsOn isolatedTestsTasks } } From d8acd47d14b79b95da57551eae8dc1cd2c7f5772 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 13 Sep 2024 00:58:46 -0400 Subject: [PATCH 11/38] Experimenting with handlers to add and remove headers from http requests. Signed-off-by: Greg Schohn --- .../proxyserver/netty/HeaderAdderHandler.java | 41 +++++++ .../netty/HeaderRemoverHandler.java | 111 ++++++++++++++++++ .../netty/HeaderAdderHandlerTest.java | 62 ++++++++++ .../netty/HeaderRemoverHandlerTest.java | 59 ++++++++++ .../proxyserver/netty/MatcherTest.java | 57 +++++++++ 5 files changed, 330 insertions(+) create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java new file mode 100644 index 000000000..b638f0039 --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java @@ -0,0 +1,41 @@ +package org.opensearch.migrations.trafficcapture.proxyserver.netty; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; + +public class HeaderAdderHandler extends ChannelInboundHandlerAdapter { + boolean insertedHeader = false; + private final ByteBuf headerLineToAdd; + + public HeaderAdderHandler(ByteBuf headerLineToAdd) { + this.headerLineToAdd = headerLineToAdd.retain(); + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (!(msg instanceof 
ByteBuf) || insertedHeader) { + super.channelRead(ctx, msg); + return; + } + var buf = (ByteBuf) msg; + buf.markReaderIndex(); + while (buf.isReadable()) { + if (buf.readByte() == '\n') { + final var upToIndex = buf.readerIndex(); + var composite = Unpooled.compositeBuffer(3); + buf.resetReaderIndex(); + composite.addComponent(true, buf.retainedSlice(0, upToIndex)); + composite.addComponent(true, headerLineToAdd.duplicate()); + composite.addComponent(true, buf.retainedSlice(upToIndex, buf.readableBytes()-upToIndex)); + buf.release(); + super.channelRead(ctx, composite); + insertedHeader = true; + return; + } + } + buf.resetReaderIndex(); + super.channelRead(ctx, msg); + } +} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java new file mode 100644 index 000000000..f636d1eea --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java @@ -0,0 +1,111 @@ +package org.opensearch.migrations.trafficcapture.proxyserver.netty; + +import java.util.List; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import lombok.SneakyThrows; + +public class HeaderRemoverHandler extends ChannelInboundHandlerAdapter { + final String headerToRemove; + CompositeByteBuf previousRemaining; + // This handler has 3 states - copying, dropping, or testing. when previousRemaining != null, we're testing. + // when dropUntilNewline == true, we're dropping, otherwise, we're copying (when previousRemaining==null) + // The starting state is previousRemaining == null and dropUntilNewline = false + boolean dropUntilNewline; + + public HeaderRemoverHandler(String headerToRemove) { + if (!headerToRemove.endsWith(":")) { + throw new IllegalArgumentException("The headerToRemove must end with a ':'"); + } + this.headerToRemove = headerToRemove; + } + + @SneakyThrows + void lambdaSafeSuperChannelRead(ChannelHandlerContext ctx, ByteBuf bb) { + super.channelRead(ctx, bb); + } + + /** + * @return true if there's a discongruity in the incoming buf and the contents that preceded this call will + * need to be buffered by the caller + */ + boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { + if (!buf.isReadable()) { + return false; + } + buf.markReaderIndex(); + for (int i=previousRemaining.readerIndex(); ; ++i) { + if (!buf.isReadable()) { // partial match + previousRemaining.addComponent(true, buf); + return true; + } + if (i == headerToRemove.length()) { // match! + previousRemaining.release(); // drop those in limbo ... + previousRemaining = null; + dropUntilNewline = true; // ... 
plus other bytes until we reset + return true; + } + if (Character.toLowerCase(headerToRemove.charAt(i)) != Character.toLowerCase(buf.readByte())) { // no match + previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb)); + previousRemaining = null; + dropUntilNewline = false; + return false; + } + } + + } + + boolean advanceByteBufUntilNewline(ByteBuf bb) { + while (bb.isReadable()) { // sonar lint doesn't like if the while statement has an empty body + if (bb.readByte() != '\n') { return true; } + } + return false; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + if (!(msg instanceof ByteBuf)) { + super.channelRead(ctx, msg); + return; + } + + var sourceBuf = (ByteBuf) msg; + var startForNextSourceSegment = sourceBuf.readerIndex(); + var cleanedIncomingBuf = ctx.alloc().compositeBuffer(4); + + while (true) { + if (previousRemaining != null) { + final var sourceReaderIdx = sourceBuf.readerIndex(); + if (matchNextBytes(ctx, sourceBuf.slice(sourceReaderIdx, sourceBuf.readableBytes())) && + sourceReaderIdx != startForNextSourceSegment) // would be 0-length + { + cleanedIncomingBuf.addComponent(true, + sourceBuf.retainedSlice(startForNextSourceSegment, sourceReaderIdx)); + startForNextSourceSegment = -1; + } + } else { + var foundNewline = advanceByteBufUntilNewline(sourceBuf); + if (dropUntilNewline) { + if (foundNewline) { + // took care of previous bytes in the source buffer in the previousRemaining != null branch + startForNextSourceSegment = sourceBuf.readerIndex(); + } + } + if (foundNewline) { + previousRemaining = ctx.alloc().compositeBuffer(16); + } else { + break; + } + } + } + if (startForNextSourceSegment >= 0) { + cleanedIncomingBuf.addComponent(true, + sourceBuf.retainedSlice(startForNextSourceSegment, sourceBuf.readerIndex()-startForNextSourceSegment)); + } + super.channelRead(ctx, cleanedIncomingBuf); + } +} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java new file mode 100644 index 000000000..0cfea4e48 --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java @@ -0,0 +1,62 @@ +package org.opensearch.migrations.trafficcapture.proxyserver.netty; + +import java.nio.charset.StandardCharsets; +import java.util.stream.Stream; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import static org.junit.jupiter.api.Assertions.*; + +class HeaderAdderHandlerTest { + + public static Stream makeArgs() { + return Stream.of( + Arguments.of("\n"), + Arguments.of("\r\n")); + } + + @ParameterizedTest + @MethodSource("makeArgs") + public void simpleCheck(String lineEnding) { + var extraHeader = "host: my.host\n"; + var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); + final var msg = makeMessage(lineEnding, ""); + + var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); + 
channel.writeInbound(Unpooled.wrappedBuffer(msg.getBytes(StandardCharsets.UTF_8))); + var output = Unpooled.compositeBuffer(); + channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + + Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); + } + + @ParameterizedTest + @MethodSource("makeArgs") + public void fragmentedCheck(String lineEnding) { + var extraHeader = "host: my.host\n"; + var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); + final var msg = makeMessage(lineEnding, ""); + + var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); + msg.chars().forEach(c -> channel.writeInbound(Unpooled.wrappedBuffer(new byte[]{(byte) c}))); + var output = Unpooled.compositeBuffer(); + channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + + Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); + } + + String makeMessage(String lineEnding, String extraHeader) { + return "GET / HTTP/1.1" + lineEnding + + extraHeader + + "NICEHeader: v1" + lineEnding + + "silLYHeader: yyy" + lineEnding + + lineEnding; + } +} \ No newline at end of file diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java new file mode 100644 index 000000000..fc8285e33 --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java @@ -0,0 +1,59 @@ +package org.opensearch.migrations.trafficcapture.proxyserver.netty; + +import java.nio.charset.StandardCharsets; +import java.util.stream.Stream; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +class HeaderRemoverHandlerTest { + + public static Stream makeArgs() { + return Stream.of( + Arguments.of("\n"), + Arguments.of("\r\n")); + } + + @ParameterizedTest + @MethodSource("makeArgs") + public void simpleCheck(String lineEnding) { + var extraHeader = "host: my.host\n"; + var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); + final var msg = makeMessage(lineEnding, true); + + var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); + channel.writeInbound(Unpooled.wrappedBuffer(msg.getBytes(StandardCharsets.UTF_8))); + var output = Unpooled.compositeBuffer(); + channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + + Assertions.assertEquals(makeMessage(lineEnding, false), output.toString(StandardCharsets.UTF_8)); + } + + @ParameterizedTest + @MethodSource("makeArgs") + public void fragmentedCheck(String lineEnding) { + var headerToRemove = "host"; + final var msg = makeMessage(lineEnding, true); + + var channel = new EmbeddedChannel(new HeaderRemoverHandler(headerToRemove)); + msg.chars().forEach(c -> channel.writeInbound(Unpooled.wrappedBuffer(new byte[]{(byte) c}))); + var output = Unpooled.compositeBuffer(); + channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + + 
Assertions.assertEquals(makeMessage(lineEnding, false), output.toString(StandardCharsets.UTF_8)); + } + + String makeMessage(String lineEnding, boolean withHosts) { + return "GET / HTTP/1.1" + lineEnding + + "hoststays: v1" + lineEnding + + (withHosts ? ("HOST: begone" + lineEnding) : "") + + "different: v1" + lineEnding + + (withHosts ? ("HosT: begone" + lineEnding) : "") + + lineEnding; + } +} \ No newline at end of file diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java new file mode 100644 index 000000000..560555d5a --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java @@ -0,0 +1,57 @@ +package org.opensearch.migrations.trafficcapture.proxyserver.netty; + +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.regex.Pattern; + +import com.google.common.base.Strings; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +@Slf4j +public class MatcherTest { + + public static final ByteBuf BIG_BUF = + Unpooled.wrappedBuffer(Strings.repeat("ha", 100_000).getBytes(StandardCharsets.UTF_8)); + public static final ByteBuf SMALL_BUF = + Unpooled.wrappedBuffer(Strings.repeat("ha", 1).getBytes(StandardCharsets.UTF_8)); + + @Test + public void test() { + var p = Pattern.compile("^host:.*", Pattern.CASE_INSENSITIVE); + + Assertions.assertTrue( + bufMatches(p, Unpooled.wrappedBuffer("host: MYHOST".getBytes(StandardCharsets.UTF_8)))); + + getMatchTime(p, BIG_BUF, 1000); + getMatchTime(p, BIG_BUF, 1000); + + for (int i=0; i<1; ++i) { + final var MATCH_REPS = 100_000_000; + var smallTime = getMatchTime(p, SMALL_BUF, MATCH_REPS); + var bigTime = getMatchTime(p, BIG_BUF, MATCH_REPS); + log.info("smallTime = "+smallTime); + log.info("bigTime = "+bigTime); + } + } + + private static Duration getMatchTime(Pattern p, ByteBuf input, int i) { + final var start = System.nanoTime(); + boolean didMatch = false; + for (; i > 0; --i) { + didMatch |= bufMatches(p, input); + } + try { + return Duration.ofNanos(System.nanoTime() - start); + } finally { + Assertions.assertFalse(didMatch); + } + } + + public static boolean bufMatches(Pattern p, ByteBuf b) { + return p.matcher(b.getCharSequence(0, b.readableBytes(),StandardCharsets.UTF_8)).matches(); + } +} From d1d2be0b63d8b935a3d39b930a93573324557ec4 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 13 Sep 2024 09:45:09 -0400 Subject: [PATCH 12/38] Bugfixes for proxy header translations and better tests Signed-off-by: Greg Schohn --- .../netty/HeaderRemoverHandler.java | 60 ++++--- .../netty/HeaderAdderHandlerTest.java | 72 +++++--- .../netty/HeaderRemoverHandlerTest.java | 156 ++++++++++++++---- 3 files changed, 209 insertions(+), 79 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java index f636d1eea..d47f9f4ee 100644 --- 
a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; +import java.nio.charset.StandardCharsets; import java.util.List; import io.netty.buffer.ByteBuf; @@ -7,8 +8,11 @@ import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.util.ReferenceCountUtil; import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class HeaderRemoverHandler extends ChannelInboundHandlerAdapter { final String headerToRemove; CompositeByteBuf previousRemaining; @@ -34,13 +38,11 @@ void lambdaSafeSuperChannelRead(ChannelHandlerContext ctx, ByteBuf bb) { * need to be buffered by the caller */ boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { - if (!buf.isReadable()) { - return false; - } - buf.markReaderIndex(); - for (int i=previousRemaining.readerIndex(); ; ++i) { + final var sourceReaderIdx = buf.readerIndex(); + for (int i=previousRemaining.writerIndex(); ; ++i) { if (!buf.isReadable()) { // partial match - previousRemaining.addComponent(true, buf); + previousRemaining.addComponent(true, + buf.retainedSlice(sourceReaderIdx, i-previousRemaining.writerIndex())); return true; } if (i == headerToRemove.length()) { // match! @@ -49,19 +51,22 @@ boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { dropUntilNewline = true; // ... plus other bytes until we reset return true; } + buf.markReaderIndex(); if (Character.toLowerCase(headerToRemove.charAt(i)) != Character.toLowerCase(buf.readByte())) { // no match previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb)); + previousRemaining.removeComponents(0, previousRemaining.numComponents()); + previousRemaining.release(); previousRemaining = null; + buf.resetReaderIndex(); dropUntilNewline = false; return false; } } - } boolean advanceByteBufUntilNewline(ByteBuf bb) { while (bb.isReadable()) { // sonar lint doesn't like if the while statement has an empty body - if (bb.readByte() != '\n') { return true; } + if (bb.readByte() == '\n') { return true; } } return false; } @@ -74,38 +79,41 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception } var sourceBuf = (ByteBuf) msg; - var startForNextSourceSegment = sourceBuf.readerIndex(); + var currentSourceSegmentStart = (previousRemaining != null || dropUntilNewline) ? 
-1 : sourceBuf.readerIndex(); var cleanedIncomingBuf = ctx.alloc().compositeBuffer(4); - while (true) { + while (sourceBuf.isReadable()) { if (previousRemaining != null) { final var sourceReaderIdx = sourceBuf.readerIndex(); - if (matchNextBytes(ctx, sourceBuf.slice(sourceReaderIdx, sourceBuf.readableBytes())) && - sourceReaderIdx != startForNextSourceSegment) // would be 0-length - { - cleanedIncomingBuf.addComponent(true, - sourceBuf.retainedSlice(startForNextSourceSegment, sourceReaderIdx)); - startForNextSourceSegment = -1; - } - } else { - var foundNewline = advanceByteBufUntilNewline(sourceBuf); - if (dropUntilNewline) { - if (foundNewline) { - // took care of previous bytes in the source buffer in the previousRemaining != null branch - startForNextSourceSegment = sourceBuf.readerIndex(); + if (matchNextBytes(ctx, sourceBuf)) { + if (currentSourceSegmentStart >= 0 && + sourceReaderIdx != currentSourceSegmentStart) // would be 0-length + { + cleanedIncomingBuf.addComponent(true, + sourceBuf.retainedSlice(currentSourceSegmentStart, sourceReaderIdx-currentSourceSegmentStart)); + currentSourceSegmentStart = -1; } + } else if (currentSourceSegmentStart == -1) { + currentSourceSegmentStart = sourceReaderIdx; } - if (foundNewline) { + } else { + if (advanceByteBufUntilNewline(sourceBuf)) { previousRemaining = ctx.alloc().compositeBuffer(16); } else { break; } } } - if (startForNextSourceSegment >= 0) { + if (currentSourceSegmentStart >= 0) { cleanedIncomingBuf.addComponent(true, - sourceBuf.retainedSlice(startForNextSourceSegment, sourceBuf.readerIndex()-startForNextSourceSegment)); + sourceBuf.retainedSlice(currentSourceSegmentStart, sourceBuf.readerIndex()-currentSourceSegmentStart)); } super.channelRead(ctx, cleanedIncomingBuf); } + + @Override + public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { + ReferenceCountUtil.release(previousRemaining); + super.channelUnregistered(ctx); + } } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java index 0cfea4e48..348461b15 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java @@ -1,57 +1,81 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; import java.nio.charset.StandardCharsets; -import java.util.stream.Stream; +import java.util.Arrays; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; +import java.util.stream.IntStream; + +import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; +import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.params.provider.ValueSource; +@WrapWithNettyLeakDetection() +@Slf4j class HeaderAdderHandlerTest { - 
public static Stream makeArgs() { - return Stream.of( - Arguments.of("\n"), - Arguments.of("\r\n")); + private void runTestsWithSize(Supplier sizesSupplier) { + runTestWithSize("\n", sizesSupplier.get()); + runTestWithSize("\r\n", sizesSupplier.get()); } - @ParameterizedTest - @MethodSource("makeArgs") - public void simpleCheck(String lineEnding) { - var extraHeader = "host: my.host\n"; - var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); - final var msg = makeMessage(lineEnding, ""); - - var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); - channel.writeInbound(Unpooled.wrappedBuffer(msg.getBytes(StandardCharsets.UTF_8))); - var output = Unpooled.compositeBuffer(); - channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + @Test + public void simpleCheck() { + runTestsWithSize(() -> IntStream.of(Integer.MAX_VALUE)); + } - Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); + @Test + public void individualBytesCheck() { + runTestsWithSize(() -> IntStream.generate(()->1)); } @ParameterizedTest - @MethodSource("makeArgs") - public void fragmentedCheck(String lineEnding) { + @ValueSource(strings = { + "8,27,9999", + "8,12,16,999" + }) + public void fragmentedBytesCheck(String sizesStr) { + runTestsWithSize(() -> Arrays.stream(sizesStr.split(",")).mapToInt(Integer::parseInt)); + } + + private void runTestWithSize(String lineEnding, IntStream sizes) { var extraHeader = "host: my.host\n"; var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); final var msg = makeMessage(lineEnding, ""); var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); - msg.chars().forEach(c -> channel.writeInbound(Unpooled.wrappedBuffer(new byte[]{(byte) c}))); + sliceMessageIntoChannelWrites(channel, msg, sizes); var output = Unpooled.compositeBuffer(); channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + channel.close(); Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); } + public static void sliceMessageIntoChannelWrites(EmbeddedChannel channel, String msg, IntStream sizes) { + final var lastStart = new AtomicInteger(); + sizes + .mapToObj(len -> { + var startIdx = lastStart.get(); + if (startIdx >= msg.length()) { return null; } + var endIdx = startIdx + len; + var substr = msg.substring(lastStart.get(), Math.min(endIdx, msg.length())); + lastStart.set(endIdx); + log.atTrace().setMessage(() -> "s: " + substr).log(); + return substr; + }) + .takeWhile(Objects::nonNull) + .forEach(substr -> channel.writeInbound(Unpooled.wrappedBuffer(substr.getBytes(StandardCharsets.UTF_8)))); + } + String makeMessage(String lineEnding, String extraHeader) { return "GET / HTTP/1.1" + lineEnding + extraHeader + diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java index fc8285e33..e48bc32d8 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java @@ -1,59 +1,157 @@ package 
org.opensearch.migrations.trafficcapture.proxyserver.netty; import java.nio.charset.StandardCharsets; -import java.util.stream.Stream; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Random; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.util.ReferenceCountUtil; +import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +@WrapWithNettyLeakDetection() +@Slf4j class HeaderRemoverHandlerTest { - public static Stream makeArgs() { - return Stream.of( - Arguments.of("\n"), - Arguments.of("\r\n")); + private static final int NUM_RANDOM_RUNS = 1_000; + + private void runTestsWithSize(BiFunction msgMaker, Supplier sizesSupplier) { + log.atDebug().setMessage(() -> + "sizes: " + sizesSupplier.get().limit(16).mapToObj(i->""+i) + .collect(Collectors.joining(","))).log(); + runTestWithSize(b -> msgMaker.apply(b,"\n"), sizesSupplier.get()); + runTestWithSize(b -> msgMaker.apply(b, "\r\n"), sizesSupplier.get()); } - @ParameterizedTest - @MethodSource("makeArgs") - public void simpleCheck(String lineEnding) { - var extraHeader = "host: my.host\n"; - var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); - final var msg = makeMessage(lineEnding, true); - - var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); - channel.writeInbound(Unpooled.wrappedBuffer(msg.getBytes(StandardCharsets.UTF_8))); + public void runTestWithSize(Function messageMaker, IntStream sizes) { + final var sourceMsg = messageMaker.apply(true); + + var channel = new EmbeddedChannel(new HeaderRemoverHandler("host:")); + HeaderAdderHandlerTest.sliceMessageIntoChannelWrites(channel, sourceMsg, sizes); var output = Unpooled.compositeBuffer(); - channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + channel.inboundMessages().forEach(v -> output.addComponent(true, ((ByteBuf) v))); + channel.close(); + + Assertions.assertEquals(messageMaker.apply(false), output.toString(StandardCharsets.UTF_8), + "Error converting source message: " + sourceMsg); + output.release(); + } + + @Test + public void throwsOnHostFormatError() { + Assertions.assertThrows(IllegalArgumentException.class, () -> new HeaderRemoverHandler("host")); + Assertions.assertThrows(IllegalArgumentException.class, () -> new HeaderRemoverHandler("h: ")); + } + + @Test + public void simpleCheck() { + runTestsWithSize(HeaderRemoverHandlerTest::makeInterlacedMessage, () -> IntStream.of(Integer.MAX_VALUE)); + } - Assertions.assertEquals(makeMessage(lineEnding, false), output.toString(StandardCharsets.UTF_8)); + @Test + public void trivialSingleByte() { + runTestsWithSize((x,y) -> "A", () -> IntStream.generate(() -> 1)); + } + + @Test + public void individualBytesCheck() { + runTestsWithSize(HeaderRemoverHandlerTest::makeThinMessage, () -> IntStream.generate(() -> 1)); + runTestsWithSize(HeaderRemoverHandlerTest::makeInterlacedMessage, () -> IntStream.generate(() -> 1)); + 
runTestsWithSize(HeaderRemoverHandlerTest::makeConsecutiveMessage, () -> IntStream.generate(() -> 1)); } @ParameterizedTest - @MethodSource("makeArgs") - public void fragmentedCheck(String lineEnding) { - var headerToRemove = "host"; - final var msg = makeMessage(lineEnding, true); + @ValueSource(strings = { + "8,22,22,22,22,9999" + }) + public void fragmentedCheckInterlaced(String sizesStr) { + runTestsWithSize(HeaderRemoverHandlerTest::makeInterlacedMessage, + () -> Arrays.stream(sizesStr.split(",")).mapToInt(Integer::parseInt)); + } - var channel = new EmbeddedChannel(new HeaderRemoverHandler(headerToRemove)); - msg.chars().forEach(c -> channel.writeInbound(Unpooled.wrappedBuffer(new byte[]{(byte) c}))); - var output = Unpooled.compositeBuffer(); - channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); + @Test + @WrapWithNettyLeakDetection(repetitions = 1) + public void randomFragmentedCheckInterlaced() { + final var bound = getBound(HeaderRemoverHandlerTest::makeInterlacedMessage); + for (int i=0; i IntStream.generate(() -> r.nextInt(bound))); + } + } - Assertions.assertEquals(makeMessage(lineEnding, false), output.toString(StandardCharsets.UTF_8)); + @ParameterizedTest + @ValueSource(strings = { + "8,22,22,22,22,9999" + }) + public void fragmentedCheckConsecutive(String sizesStr) { + runTestsWithSize(HeaderRemoverHandlerTest::makeConsecutiveMessage, + () -> Arrays.stream(sizesStr.split(",")).mapToInt(Integer::parseInt)); } - String makeMessage(String lineEnding, boolean withHosts) { + @Test + @WrapWithNettyLeakDetection(repetitions = 1) + public void randomFragmentedCheckConsecutive() { + final var bound = getBound(HeaderRemoverHandlerTest::makeConsecutiveMessage); + for (int i=0; i IntStream.generate(() -> r.nextInt(bound))); + } + } + + private int getBound(BiFunction makeInterlacedMessage) { + return Arrays.stream(makeInterlacedMessage.apply(true, "\n").split("\n")) + .mapToInt(String::length) + .map(x->x*2) + .max() + .orElseThrow(() -> new IllegalStateException("No lines in the sample")); + } + + static String makeInterlacedMessage(boolean withHosts, String lineEnding) { return "GET / HTTP/1.1" + lineEnding + "hoststays: v1" + lineEnding + (withHosts ? ("HOST: begone" + lineEnding) : "") + - "different: v1" + lineEnding + + "different: v2" + lineEnding + + (withHosts ? ("HosT: begone" + lineEnding) : "") + + "keeper: v3" + lineEnding + + lineEnding; + } + + static String makeThinMessage(boolean withHosts, String lineEnding) { + return "G" + lineEnding + + "h: a" + lineEnding + + (withHosts ? ("HOST: b" + lineEnding) : "") + + "d: c" + lineEnding + + (withHosts ? ("HosT: e" + lineEnding) : "") + + lineEnding; + } + + + static String makeConsecutiveMessage(boolean withHosts, String lineEnding) { + return "GET / HTTP/1.1" + lineEnding + + "hoststays: a1" + lineEnding + + "different: b2" + lineEnding + + (withHosts ? ("HOST: strike" + lineEnding) : "") + (withHosts ? ("HosT: begone" + lineEnding) : "") + + "e2: c3" + lineEnding + + "hos: d4" + lineEnding + + (withHosts ? ("HOST: foo" + lineEnding) : "") + + (withHosts ? ("HosT: bar" + lineEnding) : "") + + "X: Y" + lineEnding + lineEnding; } } \ No newline at end of file From 12c2207ead2edacf259fc9dac0cebccae3723841 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 13 Sep 2024 17:33:41 -0400 Subject: [PATCH 13/38] Create POOLED buffers in the proxy translation handler unit tests. That tweaked a couple other refCounting errors that are also fixed here. 
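The idea, as a minimal sketch (the class name is made up, and it assumes the
HeaderRemoverHandler from earlier in this series is on the classpath): allocate
test buffers from the channel's allocator, which is pooled unless overridden,
so that a skipped release() can surface as a Netty LEAK report instead of being
silently garbage collected.

    import java.nio.charset.StandardCharsets;

    import io.netty.buffer.ByteBuf;
    import io.netty.channel.embedded.EmbeddedChannel;
    import io.netty.util.ResourceLeakDetector;

    class PooledBufferLeakSketch { // hypothetical demo class, not part of this patch
        public static void main(String[] args) {
            // Track every allocation instead of the default ~1% sample.
            ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID);

            var channel = new EmbeddedChannel(new HeaderRemoverHandler("host:"));

            // A buffer taken from the channel's allocator that never gets
            // release()d can be reported as a leak; an Unpooled heap buffer
            // would not make the mistake anywhere near as visible.
            ByteBuf buf = channel.alloc().buffer();
            buf.writeBytes("GET / HTTP/1.1\r\nHost: x\r\n\r\n".getBytes(StandardCharsets.UTF_8));
            channel.writeInbound(buf);

            // Drain and release anything still queued in the channel.
            channel.finishAndReleaseAll();
        }
    }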
Signed-off-by: Greg Schohn --- .../netty/HeaderRemoverHandler.java | 34 +++++++++++++------ .../netty/HeaderAdderHandlerTest.java | 11 ++++-- .../netty/HeaderRemoverHandlerTest.java | 14 ++++---- 3 files changed, 38 insertions(+), 21 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java index d47f9f4ee..56accf96b 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java @@ -1,11 +1,7 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; -import java.nio.charset.StandardCharsets; -import java.util.List; - import io.netty.buffer.ByteBuf; import io.netty.buffer.CompositeByteBuf; -import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.util.ReferenceCountUtil; @@ -53,7 +49,7 @@ boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { } buf.markReaderIndex(); if (Character.toLowerCase(headerToRemove.charAt(i)) != Character.toLowerCase(buf.readByte())) { // no match - previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb)); + previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb.retain())); previousRemaining.removeComponents(0, previousRemaining.numComponents()); previousRemaining.release(); previousRemaining = null; @@ -71,6 +67,18 @@ boolean advanceByteBufUntilNewline(ByteBuf bb) { return false; } + CompositeByteBuf addSliceToComposite(ChannelHandlerContext ctx, CompositeByteBuf priorBuf, ByteBuf sourceBuf, + int start, int len) { + if (len == 0) { + return priorBuf; + } + if (priorBuf == null) { + priorBuf = ctx.alloc().compositeBuffer(4); + } + priorBuf.addComponent(true, sourceBuf.retainedSlice(start, len)); + return priorBuf; + } + @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { if (!(msg instanceof ByteBuf)) { @@ -80,7 +88,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var sourceBuf = (ByteBuf) msg; var currentSourceSegmentStart = (previousRemaining != null || dropUntilNewline) ? 
-1 : sourceBuf.readerIndex(); - var cleanedIncomingBuf = ctx.alloc().compositeBuffer(4); + CompositeByteBuf cleanedIncomingBuf = null; while (sourceBuf.isReadable()) { if (previousRemaining != null) { @@ -89,8 +97,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (currentSourceSegmentStart >= 0 && sourceReaderIdx != currentSourceSegmentStart) // would be 0-length { - cleanedIncomingBuf.addComponent(true, - sourceBuf.retainedSlice(currentSourceSegmentStart, sourceReaderIdx-currentSourceSegmentStart)); + cleanedIncomingBuf = addSliceToComposite(ctx, cleanedIncomingBuf, sourceBuf, + currentSourceSegmentStart, sourceReaderIdx-currentSourceSegmentStart); currentSourceSegmentStart = -1; } } else if (currentSourceSegmentStart == -1) { @@ -104,11 +112,15 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception } } } + if (currentSourceSegmentStart >= 0) { - cleanedIncomingBuf.addComponent(true, - sourceBuf.retainedSlice(currentSourceSegmentStart, sourceBuf.readerIndex()-currentSourceSegmentStart)); + cleanedIncomingBuf = addSliceToComposite(ctx, cleanedIncomingBuf, sourceBuf, + currentSourceSegmentStart, sourceBuf.readerIndex()-currentSourceSegmentStart); + } + sourceBuf.release(); + if (cleanedIncomingBuf != null) { + super.channelRead(ctx, cleanedIncomingBuf); } - super.channelRead(ctx, cleanedIncomingBuf); } @Override diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java index 348461b15..a9013cb84 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java @@ -54,10 +54,11 @@ private void runTestWithSize(String lineEnding, IntStream sizes) { var channel = new EmbeddedChannel(new HeaderAdderHandler(newHeader)); sliceMessageIntoChannelWrites(channel, msg, sizes); var output = Unpooled.compositeBuffer(); - channel.inboundMessages().forEach(v -> output.addComponent(true, (ByteBuf) v)); - channel.close(); + channel.inboundMessages().forEach(v -> output.addComponent(true, ((ByteBuf) v).retain())); + channel.finishAndReleaseAll(); Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); + output.release(); } public static void sliceMessageIntoChannelWrites(EmbeddedChannel channel, String msg, IntStream sizes) { @@ -73,7 +74,11 @@ public static void sliceMessageIntoChannelWrites(EmbeddedChannel channel, String return substr; }) .takeWhile(Objects::nonNull) - .forEach(substr -> channel.writeInbound(Unpooled.wrappedBuffer(substr.getBytes(StandardCharsets.UTF_8)))); + .forEach(substr -> { + var bytes = substr.getBytes(StandardCharsets.UTF_8); + var buf = channel.alloc().buffer(bytes.length); + channel.writeInbound(buf.writeBytes(bytes)); + }); } String makeMessage(String lineEnding, String extraHeader) { diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java index 
e48bc32d8..3a9cb25e8 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java @@ -13,7 +13,6 @@ import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import io.netty.buffer.ByteBuf; -import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.util.ReferenceCountUtil; import lombok.extern.slf4j.Slf4j; @@ -22,7 +21,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; -@WrapWithNettyLeakDetection() +@WrapWithNettyLeakDetection(repetitions = 1) @Slf4j class HeaderRemoverHandlerTest { @@ -41,13 +40,14 @@ public void runTestWithSize(Function messageMaker, IntStream siz var channel = new EmbeddedChannel(new HeaderRemoverHandler("host:")); HeaderAdderHandlerTest.sliceMessageIntoChannelWrites(channel, sourceMsg, sizes); - var output = Unpooled.compositeBuffer(); - channel.inboundMessages().forEach(v -> output.addComponent(true, ((ByteBuf) v))); - channel.close(); + var outputBuf = channel.alloc().compositeBuffer(); + channel.inboundMessages().forEach(v -> outputBuf.addComponent(true, ((ByteBuf) v).retain())); + channel.finishAndReleaseAll(); - Assertions.assertEquals(messageMaker.apply(false), output.toString(StandardCharsets.UTF_8), + var outputString = outputBuf.toString(StandardCharsets.UTF_8); + Assertions.assertEquals(messageMaker.apply(false), outputString, "Error converting source message: " + sourceMsg); - output.release(); + outputBuf.release(); } @Test From 7ccb7c90ec84d356f2ce11c481ea03b40cbd408f Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 13 Sep 2024 17:40:51 -0400 Subject: [PATCH 14/38] Undo the spotless changes for JCommander parameters. 
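One way to keep hand-formatted regions like these from being re-flattened by
the next spotlessApply pass is Spotless's toggle comments, sketched below on
one of the parameters from this file. This is an assumption about the build
tooling: the comments are only honored if toggleOffOn() is enabled for the
Java formatting step, which this repo may not do.

    // spotless:off  (honored only when toggleOffOn() is configured)
    @Parameter(required = false,
        names = { "--kafkaClientId" },
        arity = 1,
        description = "clientId to use for interfacing with Kafka.")
    public String kafkaClientId = DEFAULT_KAFKA_CLIENT_ID;
    // spotless:on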
Signed-off-by: Greg Schohn --- .../proxyserver/CaptureProxy.java | 99 ++++++++++++------- 1 file changed, 66 insertions(+), 33 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index bbb350147..ca9e67436 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -62,59 +62,92 @@ public class CaptureProxy { public static final String DEFAULT_KAFKA_CLIENT_ID = "HttpCaptureProxyProducer"; public static class Parameters { - @Parameter(required = false, names = { - "--traceDirectory" }, arity = 1, description = "Directory to store trace files in.") + @Parameter(required = false, + names = { "--traceDirectory" }, + arity = 1, + description = "Directory to store trace files in.") public String traceDirectory; - @Parameter(required = false, names = { - "--noCapture" }, arity = 0, description = "If enabled, Does NOT capture traffic to ANY sink.") + @Parameter(required = false, + names = { "--noCapture" }, + arity = 0, + description = "If enabled, Does NOT capture traffic to ANY sink.") public boolean noCapture; - @Parameter(required = false, names = { - "--kafkaConfigFile" }, arity = 1, description = "Kafka properties file for additional client customization.") + @Parameter(required = false, + names = { "--kafkaConfigFile" }, + arity = 1, + description = "Kafka properties file for additional client customization.") public String kafkaPropertiesFile; - @Parameter(required = false, names = { - "--kafkaClientId" }, arity = 1, description = "clientId to use for interfacing with Kafka.") + @Parameter(required = false, + names = { "--kafkaClientId" }, + arity = 1, + description = "clientId to use for interfacing with Kafka.") public String kafkaClientId = DEFAULT_KAFKA_CLIENT_ID; - @Parameter(required = false, names = { - "--kafkaConnection" }, arity = 1, description = "Sequence of values delimited by ','.") + @Parameter(required = false, + names = { "--kafkaConnection" }, + arity = 1, + description = "Sequence of values delimited by ','.") public String kafkaConnection; - @Parameter(required = false, names = { - "--enableMSKAuth" }, arity = 0, description = "Enables SASL Kafka properties required for connecting to MSK with IAM auth.") + @Parameter(required = false, + names = { "--enableMSKAuth" }, + arity = 0, + description = "Enables SASL Kafka properties required for connecting to MSK with IAM auth.") public boolean mskAuthEnabled = false; - @Parameter(required = false, names = { - "--sslConfigFile" }, arity = 1, description = "YAML configuration of the HTTPS settings. When this is not set, the proxy will not use TLS.") + @Parameter(required = false, + names = { "--sslConfigFile" }, + arity = 1, + description = "YAML configuration of the HTTPS settings. 
When this is not set, the proxy will not use TLS.") public String sslConfigFilePath; - @Parameter(required = false, names = { - "--maxTrafficBufferSize" }, arity = 1, description = "The maximum number of bytes that will be written to a single TrafficStream object.") + @Parameter(required = false, + names = { "--maxTrafficBufferSize" }, + arity = 1, + description = "The maximum number of bytes that will be written to a single TrafficStream object.") public int maximumTrafficStreamSize = 1024 * 1024; - @Parameter(required = false, names = { - "--insecureDestination" }, arity = 0, description = "Do not check the destination server's certificate") + @Parameter(required = false, + names = { "--insecureDestination" }, + arity = 0, + description = "Do not check the destination server's certificate") public boolean allowInsecureConnectionsToBackside; - @Parameter(required = true, names = { - "--destinationUri" }, arity = 1, description = "URI of the server that the proxy is capturing traffic for.") + @Parameter(required = true, + names = { "--destinationUri" }, + arity = 1, + description = "URI of the server that the proxy is capturing traffic for.") public String backsideUriString; - @Parameter(required = true, names = { - "--listenPort" }, arity = 1, description = "Exposed port for clients to connect to this proxy.") + @Parameter(required = true, + names = { "--listenPort" }, + arity = 1, + description = "Exposed port for clients to connect to this proxy.") public int frontsidePort = 0; - @Parameter(required = false, names = { - "--numThreads" }, arity = 1, description = "How many threads netty should create in its event loop group") + @Parameter(required = false, + names = { "--numThreads" }, + arity = 1, + description = "How many threads netty should create in its event loop group") public int numThreads = 1; - @Parameter(required = false, names = { - "--destinationConnectionPoolSize" }, arity = 1, description = "Number of socket connections that should be maintained to the destination server " + @Parameter(required = false, + names = { "--destinationConnectionPoolSize" }, + arity = 1, + description = "Number of socket connections that should be maintained to the destination server " + "to reduce the perceived latency to clients. Each thread will have its own cache, so the " + "total number of outstanding warm connections will be multiplied by numThreads.") public int destinationConnectionPoolSize = 0; - @Parameter(required = false, names = { - "--destinationConnectionPoolTimeout" }, arity = 1, description = "Of the socket connections maintained by the destination connection pool, " + @Parameter(required = false, + names = { "--destinationConnectionPoolTimeout" }, + arity = 1, + description = "Of the socket connections maintained by the destination connection pool, " + "how long after connection should the be recycled " + "(closed with a new connection taking its place)") public String destinationConnectionPoolTimeout = "PT30S"; - @Parameter(required = false, names = { - "--otelCollectorEndpoint" }, arity = 1, description = "Endpoint (host:port) for the OpenTelemetry Collector to which metrics logs should be forwarded." + @Parameter(required = false, + names = { "--otelCollectorEndpoint" }, + arity = 1, + description = "Endpoint (host:port) for the OpenTelemetry Collector to which metrics logs should be forwarded." 
+ "If this is not provided, metrics will not be sent to a collector.") public String otelCollectorEndpoint; - @Parameter(required = false, names = "--suppressCaptureForHeaderMatch", arity = 2, description = "The header name (which will be interpreted in a case-insensitive manner) and a regex " - + "pattern. When the incoming request has a header that matches the regex, it will be passed " - + "through to the service but will NOT be captured. E.g. user-agent 'healthcheck'.") + @Parameter(required = false, + names = "--suppressCaptureForHeaderMatch", + arity = 2, + description = "The header name (which will be interpreted in a case-insensitive manner) and a regex " + + "pattern. When the incoming request has a header that matches the regex, it will be passed " + + "through to the service but will NOT be captured. E.g. user-agent 'healthcheck'.") public List suppressCaptureHeaderPairs = new ArrayList<>(); } From 0784a7566ecd955c045b63a8c4211441b75c82ba Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Fri, 13 Sep 2024 19:41:07 -0400 Subject: [PATCH 15/38] Untested changes to setup a --setHeader option to the proxy to add/overwrite an HTTP header. Signed-off-by: Greg Schohn --- .../proxyserver/CaptureProxy.java | 58 ++++++++++++++++--- .../proxyserver/netty/HeaderAdderHandler.java | 13 ++++- .../netty/NettyScanningHttpProxy.java | 28 +++------ .../netty/ProxyChannelInitializer.java | 6 +- .../netty/HeaderAdderHandlerTest.java | 4 +- .../netty/NettyScanningHttpProxyTest.java | 3 +- 6 files changed, 76 insertions(+), 36 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index ca9e67436..ed2c25f9a 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -4,6 +4,7 @@ import java.io.IOException; import java.io.OutputStream; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.time.Duration; @@ -21,6 +22,8 @@ import javax.net.ssl.SSLException; import com.google.protobuf.CodedOutputStream; +import io.netty.buffer.Unpooled; +import io.netty.channel.socket.SocketChannel; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -38,8 +41,12 @@ import org.opensearch.migrations.trafficcapture.StreamLifecycleManager; import org.opensearch.migrations.trafficcapture.kafkaoffloader.KafkaCaptureFactory; import org.opensearch.migrations.trafficcapture.netty.HeaderValueFilteringCapturePredicate; +import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; import org.opensearch.migrations.trafficcapture.proxyserver.netty.BacksideConnectionPool; +import org.opensearch.migrations.trafficcapture.proxyserver.netty.HeaderAdderHandler; +import org.opensearch.migrations.trafficcapture.proxyserver.netty.HeaderRemoverHandler; import org.opensearch.migrations.trafficcapture.proxyserver.netty.NettyScanningHttpProxy; +import org.opensearch.migrations.trafficcapture.proxyserver.netty.ProxyChannelInitializer; import org.opensearch.migrations.utils.ProcessHelpers; import 
org.opensearch.security.ssl.DefaultSecurityKeyStore; import org.opensearch.security.ssl.util.SSLConfigConstants; @@ -142,6 +149,12 @@ public static class Parameters { description = "Endpoint (host:port) for the OpenTelemetry Collector to which metrics logs should be forwarded." + "If this is not provided, metrics will not be sent to a collector.") public String otelCollectorEndpoint; + @Parameter(required = false, + names = "--setHeader", + arity = 2, + description = "[header-name header-value] Set an HTTP header (first argument) with to the specified value" + + " (second argument). Any existing headers with that name will be removed.") + public List headerOverrides = new ArrayList<>(); @Parameter(required = false, names = "--suppressCaptureForHeaderMatch", arity = 2, @@ -346,7 +359,7 @@ public static void main(String[] args) throws InterruptedException, IOException var params = parseArgs(args); var backsideUri = convertStringToUri(params.backsideUriString); - var rootContext = new RootCaptureContext( + var ctx = new RootCaptureContext( RootOtelContext.initializeOpenTelemetryWithCollectorOrAsNoop(params.otelCollectorEndpoint, "capture", ProcessHelpers.getNodeInstanceName()), new CompositeContextTracker(new ActiveContextTracker(), new ActiveContextTrackerByActivityType()) @@ -382,14 +395,10 @@ public static void main(String[] args) throws InterruptedException, IOException var headerCapturePredicate = new HeaderValueFilteringCapturePredicate( convertPairListToMap(params.suppressCaptureHeaderPairs) ); - proxy.start( - rootContext, - backsideConnectionPool, - params.numThreads, - sslEngineSupplier, - getConnectionCaptureFactory(params, rootContext), - headerCapturePredicate - ); + var proxyChannelInitializer = + buildProxyChannelInitializer(ctx, backsideConnectionPool, sslEngineSupplier, headerCapturePredicate, + params.headerOverrides, getConnectionCaptureFactory(params, ctx)); + proxy.start(proxyChannelInitializer, params.numThreads); } catch (Exception e) { log.atError().setCause(e).setMessage("Caught exception while setting up the server and rethrowing").log(); throw e; @@ -408,4 +417,35 @@ public static void main(String[] args) throws InterruptedException, IOException // work in the background. 
proxy.waitForClose(); } + + static ProxyChannelInitializer buildProxyChannelInitializer(RootCaptureContext rootContext, + BacksideConnectionPool backsideConnectionPool, + Supplier sslEngineSupplier, + @NonNull RequestCapturePredicate headerCapturePredicate, + List headerOverrides, + IConnectionCaptureFactory connectionFactory) + { + var headers = convertPairListToMap(headerOverrides); + return new ProxyChannelInitializer( + rootContext, + backsideConnectionPool, + sslEngineSupplier, + connectionFactory, + headerCapturePredicate + ) { + @Override + protected void initChannel(@NonNull SocketChannel ch) throws IOException { + super.initChannel(ch); + for (var k : headers.keySet()) { + ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "RemoveHeader-" + k, + new HeaderRemoverHandler(k + ":")); + } + for (var kvp : headers.entrySet()) { + var lineBytes = (kvp.getKey() + ":" + kvp.getValue()).getBytes(StandardCharsets.UTF_8); + ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "AddHeader-" + kvp.getKey(), + new HeaderAdderHandler(Unpooled.unreleasableBuffer(Unpooled.wrappedBuffer(lineBytes)))); + } + } + }; + } } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java index b638f0039..ee2226032 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java @@ -1,13 +1,20 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; +import java.nio.charset.StandardCharsets; + import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; public class HeaderAdderHandler extends ChannelInboundHandlerAdapter { + private static final ByteBuf CRLF_BYTE_BUF = + Unpooled.unreleasableBuffer(Unpooled.wrappedBuffer("\r\n".getBytes(StandardCharsets.UTF_8))); + private static final ByteBuf LF_BYTE_BUF = + Unpooled.unreleasableBuffer(Unpooled.wrappedBuffer("\n".getBytes(StandardCharsets.UTF_8))); boolean insertedHeader = false; private final ByteBuf headerLineToAdd; + boolean useCarriageReturn; public HeaderAdderHandler(ByteBuf headerLineToAdd) { this.headerLineToAdd = headerLineToAdd.retain(); @@ -22,12 +29,16 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var buf = (ByteBuf) msg; buf.markReaderIndex(); while (buf.isReadable()) { - if (buf.readByte() == '\n') { + var nextByte = buf.readByte(); + if (nextByte == '\r') { + useCarriageReturn = true; + } else if (nextByte == '\n') { final var upToIndex = buf.readerIndex(); var composite = Unpooled.compositeBuffer(3); buf.resetReaderIndex(); composite.addComponent(true, buf.retainedSlice(0, upToIndex)); composite.addComponent(true, headerLineToAdd.duplicate()); + composite.addComponent(true, (useCarriageReturn ? 
CRLF_BYTE_BUF : LF_BYTE_BUF).duplicate()); composite.addComponent(true, buf.retainedSlice(upToIndex, buf.readableBytes()-upToIndex)); buf.release(); super.channelRead(ctx, composite); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index f8770930b..4c4b2736a 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -12,11 +12,14 @@ import io.netty.channel.ChannelOption; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.util.concurrent.DefaultThreadFactory; +import lombok.Getter; import lombok.NonNull; public class NettyScanningHttpProxy { + @Getter protected final int proxyPort; protected Channel mainChannel; protected EventLoopGroup workerGroup; @@ -26,33 +29,16 @@ public NettyScanningHttpProxy(int proxyPort) { this.proxyPort = proxyPort; } - public int getProxyPort() { - return proxyPort; - } - - public void start( - IRootWireLoggingContext rootContext, - BacksideConnectionPool backsideConnectionPool, - int numThreads, - Supplier sslEngineSupplier, - IConnectionCaptureFactory connectionCaptureFactory, - @NonNull RequestCapturePredicate requestCapturePredicate - ) throws InterruptedException { + public void start(ProxyChannelInitializer proxyChannelInitializer, int numThreads) + throws InterruptedException + { bossGroup = new NioEventLoopGroup(1, new DefaultThreadFactory("captureProxyPoolBoss")); workerGroup = new NioEventLoopGroup(numThreads, new DefaultThreadFactory("captureProxyPoolWorker")); ServerBootstrap serverBootstrap = new ServerBootstrap(); try { mainChannel = serverBootstrap.group(bossGroup, workerGroup) .channel(NioServerSocketChannel.class) - .childHandler( - new ProxyChannelInitializer<>( - rootContext, - backsideConnectionPool, - sslEngineSupplier, - connectionCaptureFactory, - requestCapturePredicate - ) - ) + .childHandler(proxyChannelInitializer) .childOption(ChannelOption.AUTO_READ, false) .bind(proxyPort) .sync() diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java index b32a6e6e8..96972ec60 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ProxyChannelInitializer.java @@ -17,6 +17,8 @@ import lombok.NonNull; public class ProxyChannelInitializer extends ChannelInitializer { + protected static final String CAPTURE_HANDLER_NAME = "CaptureHandler"; + protected final IConnectionCaptureFactory connectionCaptureFactory; protected final Supplier sslEngineProvider; protected final IRootWireLoggingContext rootContext; @@ -46,7 +48,7 @@ public boolean 
shouldGuaranteeMessageOffloading(HttpRequest httpRequest) { } @Override - protected void initChannel(SocketChannel ch) throws IOException { + protected void initChannel(@NonNull SocketChannel ch) throws IOException { var sslContext = sslEngineProvider != null ? sslEngineProvider.get() : null; if (sslContext != null) { ch.pipeline().addLast(new SslHandler(sslEngineProvider.get())); @@ -54,7 +56,7 @@ protected void initChannel(SocketChannel ch) throws IOException { var connectionId = ch.id().asLongText(); ch.pipeline() - .addLast( + .addLast(CAPTURE_HANDLER_NAME, new ConditionallyReliableLoggingHttpHandler<>( rootContext, "", diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java index a9013cb84..f730691e3 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java @@ -47,7 +47,7 @@ public void fragmentedBytesCheck(String sizesStr) { } private void runTestWithSize(String lineEnding, IntStream sizes) { - var extraHeader = "host: my.host\n"; + var extraHeader = "host: my.host"; var newHeader = Unpooled.wrappedBuffer(extraHeader.getBytes(StandardCharsets.UTF_8)); final var msg = makeMessage(lineEnding, ""); @@ -57,7 +57,7 @@ private void runTestWithSize(String lineEnding, IntStream sizes) { channel.inboundMessages().forEach(v -> output.addComponent(true, ((ByteBuf) v).retain())); channel.finishAndReleaseAll(); - Assertions.assertEquals(makeMessage(lineEnding, extraHeader), output.toString(StandardCharsets.UTF_8)); + Assertions.assertEquals(makeMessage(lineEnding, extraHeader + lineEnding), output.toString(StandardCharsets.UTF_8)); output.release(); } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java index 6cfd4ec01..be4ffa582 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java @@ -207,7 +207,8 @@ private static Tuple startServers( var connectionPool = new BacksideConnectionPool(testServerUri, null, 10, Duration.ofSeconds(10)); nshp.get() - .start(rootCtx, connectionPool, 1, null, connectionCaptureFactory, new RequestCapturePredicate()); + .start(new ProxyChannelInitializer(rootCtx, connectionPool, null, + connectionCaptureFactory, new RequestCapturePredicate()), 1); System.out.println("proxy port = " + port); } catch (InterruptedException e) { Thread.currentThread().interrupt(); From 58a1892ccf659cdc5181a1c808c0464fb047afcb Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Mon, 16 Sep 2024 10:34:51 -0400 Subject: [PATCH 16/38] Bugfixes for CaptureProxy --setHeader and expanded the scope of the "HttpFirstLine" test fixture to include headers. 
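As the message below explains, the original wiring registered the add- and
remove-header handlers in the wrong order. The root cause is
ChannelPipeline.addAfter semantics: when several handlers are anchored after
the same base name, the one added last sits closest to the anchor and
therefore sees inbound data first. A self-contained sketch of the effect
(names and no-op handlers are illustrative only):

    import io.netty.channel.ChannelDuplexHandler;
    import io.netty.channel.embedded.EmbeddedChannel;

    class AddAfterOrderSketch { // hypothetical demo class
        public static void main(String[] args) {
            var ch = new EmbeddedChannel();
            ch.pipeline().addLast("capture", new ChannelDuplexHandler());

            // Both handlers are anchored on "capture"; the LAST addAfter() call
            // lands CLOSEST to "capture" and so runs FIRST on inbound data.
            ch.pipeline().addAfter("capture", "addHeader", new ChannelDuplexHandler());
            ch.pipeline().addAfter("capture", "removeHeader", new ChannelDuplexHandler());

            // Inbound order is capture -> removeHeader -> addHeader, so removal
            // happens before addition and a freshly added header survives.
            System.out.println(ch.pipeline().names());
        }
    }

Registering the HeaderAdderHandler entries first and the HeaderRemoverHandler
entries second, as the fix does, yields exactly that remove-then-add inbound
order.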
In writing a test for the CaptureProxy, I wanted to be able to capture the headers that were sent to the server, so I had to expand the "first line" object to be a more complete HTTP request. The CaptureProxy root class itself had the add and remove handlers in the pipeline in the opposite order, so that was flipped and the new test now works. Signed-off-by: Greg Schohn --- ...CoordinatorErrantAcquisitonsRetryTest.java | 12 ++-- .../java/com/rfs/common/RestClientTest.java | 4 +- .../proxyserver/CaptureProxy.java | 17 +++-- .../proxyserver/netty/HeaderAdderHandler.java | 2 +- .../netty/NettyScanningHttpProxy.java | 7 -- .../proxyserver/TestHeaderRewrites.java | 70 +++++++++++++++++++ .../netty/HeaderAdderHandlerTest.java | 11 +-- .../netty/HeaderRemoverHandlerTest.java | 13 ++-- .../proxyserver/netty/MatcherTest.java | 57 --------------- .../netty/NettyScanningHttpProxyTest.java | 4 +- .../testcontainers/CaptureProxyContainer.java | 19 +++-- .../NettyPacketToHttpConsumerTest.java | 6 +- ...ficStreamBecomesTwoTargetChannelsTest.java | 8 +-- .../replay/TestHttpServerContext.java | 8 +-- .../migrations/testutils/HttpFirstLine.java | 11 --- .../migrations/testutils/HttpRequest.java | 15 ++++ .../testutils/HttpRequestFirstLine.java | 11 --- .../testutils/SimpleHttpServer.java | 45 +++++------- .../testutils/SimpleNettyHttpServer.java | 24 ++++--- 19 files changed, 174 insertions(+), 170 deletions(-) create mode 100644 TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java delete mode 100644 TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java delete mode 100644 testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpFirstLine.java create mode 100644 testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequest.java delete mode 100644 testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequestFirstLine.java diff --git a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorErrantAcquisitonsRetryTest.java b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorErrantAcquisitonsRetryTest.java index 1b95c3ddc..4938a9049 100644 --- a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorErrantAcquisitonsRetryTest.java +++ b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorErrantAcquisitonsRetryTest.java @@ -13,7 +13,7 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; @@ -108,7 +108,7 @@ private static Stream makeArgs() { @MethodSource(value = "makeArgs") public void testSecondPhaseLeaseAcquisitionFailureKeepsRetrying( Class exceptionClassToTest, - Function> responseFactory) + Function> responseFactory) throws Exception { var pathToCounts = new PathCounts(); @@ -183,23 +183,23 @@ void reset() { } @NonNull - private static Function> + private static Function> getCountingResponseMakerWithSearchBody(String searchResponse) { var payloadBytes = searchResponse.getBytes(StandardCharsets.UTF_8); return pathCounts -> getCountingResponseMaker(pathCounts, makeResponse(200, "OK", payloadBytes)); } @NonNull - private static Function> + 
private static Function> getCountingResponseMaker(SimpleHttpResponse searchResponse) { return pathCounts -> getCountingResponseMaker(pathCounts, searchResponse); } @NonNull - private static Function + private static Function getCountingResponseMaker(PathCounts pathToCountMap, SimpleHttpResponse searchResponse) { return httpRequestFirstLine -> { - final var uriPath = httpRequestFirstLine.path().getPath(); + final var uriPath = httpRequestFirstLine.getPath().getPath(); if (uriPath.startsWith("/" + OpenSearchWorkCoordinator.INDEX_NAME + "/_refresh")) { ++pathToCountMap.refreshes; return makeResponse(200, "OK", diff --git a/RFS/src/test/java/com/rfs/common/RestClientTest.java b/RFS/src/test/java/com/rfs/common/RestClientTest.java index 7b29f00ae..c76680bbf 100644 --- a/RFS/src/test/java/com/rfs/common/RestClientTest.java +++ b/RFS/src/test/java/com/rfs/common/RestClientTest.java @@ -12,7 +12,7 @@ import org.junit.jupiter.api.Test; import org.opensearch.migrations.snapshot.creation.tracing.SnapshotTestContext; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; @@ -122,7 +122,7 @@ public void testGetEmitsInstrumentation() throws Exception { } } - SimpleHttpResponse makeResponseContext(HttpRequestFirstLine firstLine) { + SimpleHttpResponse makeResponseContext(HttpRequest firstLine) { var payloadBytes = "Hi".getBytes(StandardCharsets.UTF_8); return new SimpleHttpResponse( Map.of("Content-Type", "text/plain", "content-length", payloadBytes.length + ""), diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index ed2c25f9a..c73b060bf 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -22,8 +22,6 @@ import javax.net.ssl.SSLException; import com.google.protobuf.CodedOutputStream; -import io.netty.buffer.Unpooled; -import io.netty.channel.socket.SocketChannel; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -54,6 +52,8 @@ import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; +import io.netty.buffer.Unpooled; +import io.netty.channel.socket.SocketChannel; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.util.InsecureTrustManagerFactory; @@ -345,6 +345,9 @@ protected static SslContext loadBacksideSslContext(URI serverUri, boolean allowI } protected static Map convertPairListToMap(List list) { + if (list == null) { + return Map.of(); + } var map = new TreeMap(); for (int i = 0; i < list.size(); i += 2) { map.put(list.get(i), list.get(i + 1)); @@ -436,15 +439,15 @@ static ProxyChannelInitializer buildProxyChannelInitializer(RootCaptureContext r @Override protected void initChannel(@NonNull SocketChannel ch) throws IOException { super.initChannel(ch); - for (var k : headers.keySet()) { - 
ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "RemoveHeader-" + k, - new HeaderRemoverHandler(k + ":")); - } for (var kvp : headers.entrySet()) { - var lineBytes = (kvp.getKey() + ":" + kvp.getValue()).getBytes(StandardCharsets.UTF_8); + var lineBytes = (kvp.getKey() + ": " + kvp.getValue()).getBytes(StandardCharsets.UTF_8); ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "AddHeader-" + kvp.getKey(), new HeaderAdderHandler(Unpooled.unreleasableBuffer(Unpooled.wrappedBuffer(lineBytes)))); } + for (var k : headers.keySet()) { + ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "RemoveHeader-" + k, + new HeaderRemoverHandler(k + ":")); + } } }; } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java index ee2226032..147684898 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java @@ -34,7 +34,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception useCarriageReturn = true; } else if (nextByte == '\n') { final var upToIndex = buf.readerIndex(); - var composite = Unpooled.compositeBuffer(3); + var composite = Unpooled.compositeBuffer(4); buf.resetReaderIndex(); composite.addComponent(true, buf.retainedSlice(0, upToIndex)); composite.addComponent(true, headerLineToAdd.duplicate()); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java index 4c4b2736a..9ed135890 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxy.java @@ -1,22 +1,15 @@ package org.opensearch.migrations.trafficcapture.proxyserver.netty; -import java.util.function.Supplier; -import javax.net.ssl.SSLEngine; -import org.opensearch.migrations.trafficcapture.IConnectionCaptureFactory; -import org.opensearch.migrations.trafficcapture.netty.RequestCapturePredicate; -import org.opensearch.migrations.trafficcapture.netty.tracing.IRootWireLoggingContext; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelOption; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; -import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.util.concurrent.DefaultThreadFactory; import lombok.Getter; -import lombok.NonNull; public class NettyScanningHttpProxy { @Getter diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java new file mode 100644 index 
000000000..b45039166 --- /dev/null +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java @@ -0,0 +1,70 @@ +package org.opensearch.migrations.trafficcapture.proxyserver; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import org.opensearch.migrations.testutils.HttpRequest; +import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; +import org.opensearch.migrations.testutils.SimpleHttpResponse; +import org.opensearch.migrations.testutils.SimpleNettyHttpServer; +import org.opensearch.migrations.trafficcapture.proxyserver.testcontainers.CaptureProxyContainer; + +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class TestHeaderRewrites { + @Test + public void testRewrites() throws Exception { + final var payloadBytes = "Success".getBytes(StandardCharsets.UTF_8); + final var headers = Map.of( + "Content-Type", + "text/plain", + "Content-Length", + "" + payloadBytes.length + ); + var rewriteArgs = List.of( + "--setHeader", + "host", + "localhost", + "--setHeader", + "X-new-header", + "insignificant value" + ); + var capturedRequestList = new ArrayList(); + try (var destinationServer = SimpleNettyHttpServer.makeServer(false, + Duration.ofMinutes(10), + fl -> { + capturedRequestList.add(fl); + log.error("headers: " + fl.getHeaders().stream().map(kvp->kvp.getKey()+": "+kvp.getValue()) + .collect(Collectors.joining())); + return new SimpleHttpResponse(headers, payloadBytes, "OK", 200); + }); + var proxy = new CaptureProxyContainer(() -> destinationServer.localhostEndpoint().toString(), null, + rewriteArgs.stream()); + var client = new SimpleHttpClientForTesting()) + { + proxy.start(); + final var proxyEndpoint = CaptureProxyContainer.getUriFromContainer(proxy); + + + var allHeaders = new LinkedHashMap(); + allHeaders.put("Host", "localhost"); + allHeaders.put("User-Agent", "UnitTest"); + var response = client.makeGetRequest(new URI(proxyEndpoint), allHeaders.entrySet().stream()); + log.error("response=" + response); + var capturedRequest = capturedRequestList.get(capturedRequestList.size()-1).getHeaders().stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + Assertions.assertEquals("localhost", capturedRequest.get("host")); + Assertions.assertEquals("insignificant value", capturedRequest.get("X-new-header")); + } + } +} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java index f730691e3..874af2432 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandlerTest.java @@ -7,16 +7,17 @@ import java.util.function.Supplier; import java.util.stream.IntStream; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; 
+ import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.embedded.EmbeddedChannel; import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; @WrapWithNettyLeakDetection() @Slf4j @@ -88,4 +89,4 @@ String makeMessage(String lineEnding, String extraHeader) { "silLYHeader: yyy" + lineEnding + lineEnding; } -} \ No newline at end of file +} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java index 3a9cb25e8..d1011e7d7 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java @@ -2,7 +2,6 @@ import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Comparator; import java.util.Random; import java.util.function.BiFunction; import java.util.function.Function; @@ -10,16 +9,16 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + import org.opensearch.migrations.testutils.WrapWithNettyLeakDetection; import io.netty.buffer.ByteBuf; import io.netty.channel.embedded.EmbeddedChannel; -import io.netty.util.ReferenceCountUtil; import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; @WrapWithNettyLeakDetection(repetitions = 1) @Slf4j @@ -154,4 +153,4 @@ static String makeConsecutiveMessage(boolean withHosts, String lineEnding) { "X: Y" + lineEnding + lineEnding; } -} \ No newline at end of file +} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java deleted file mode 100644 index 560555d5a..000000000 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/MatcherTest.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.opensearch.migrations.trafficcapture.proxyserver.netty; - -import java.nio.charset.StandardCharsets; -import java.time.Duration; -import java.util.regex.Pattern; - -import com.google.common.base.Strings; -import io.netty.buffer.ByteBuf; -import io.netty.buffer.Unpooled; -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -@Slf4j -public class MatcherTest { - - public static final ByteBuf BIG_BUF = - Unpooled.wrappedBuffer(Strings.repeat("ha", 100_000).getBytes(StandardCharsets.UTF_8)); - public static final ByteBuf SMALL_BUF = - Unpooled.wrappedBuffer(Strings.repeat("ha", 1).getBytes(StandardCharsets.UTF_8)); - 
- @Test - public void test() { - var p = Pattern.compile("^host:.*", Pattern.CASE_INSENSITIVE); - - Assertions.assertTrue( - bufMatches(p, Unpooled.wrappedBuffer("host: MYHOST".getBytes(StandardCharsets.UTF_8)))); - - getMatchTime(p, BIG_BUF, 1000); - getMatchTime(p, BIG_BUF, 1000); - - for (int i=0; i<1; ++i) { - final var MATCH_REPS = 100_000_000; - var smallTime = getMatchTime(p, SMALL_BUF, MATCH_REPS); - var bigTime = getMatchTime(p, BIG_BUF, MATCH_REPS); - log.info("smallTime = "+smallTime); - log.info("bigTime = "+bigTime); - } - } - - private static Duration getMatchTime(Pattern p, ByteBuf input, int i) { - final var start = System.nanoTime(); - boolean didMatch = false; - for (; i > 0; --i) { - didMatch |= bufMatches(p, input); - } - try { - return Duration.ofNanos(System.nanoTime() - start); - } finally { - Assertions.assertFalse(didMatch); - } - } - - public static boolean bufMatches(Pattern p, ByteBuf b) { - return p.matcher(b.getCharSequence(0, b.readableBytes(),StandardCharsets.UTF_8)).matches(); - } -} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java index be4ffa582..7523583c2 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/NettyScanningHttpProxyTest.java @@ -18,7 +18,7 @@ import org.junit.jupiter.api.Test; import org.opensearch.common.collect.Tuple; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.PortFinder; import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpResponse; @@ -218,7 +218,7 @@ private static Tuple startServers( return new Tuple<>(nshp.get(), underlyingPort); } - private static SimpleHttpResponse makeContext(HttpRequestFirstLine request) { + private static SimpleHttpResponse makeContext(HttpRequest request) { var headers = Map.of( "Content-Type", "text/plain", diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/testcontainers/CaptureProxyContainer.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/testcontainers/CaptureProxyContainer.java index 6bafcd3d0..7d03179d3 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/testcontainers/CaptureProxyContainer.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/testcontainers/CaptureProxyContainer.java @@ -6,6 +6,7 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.opensearch.migrations.testutils.PortFinder; import org.opensearch.migrations.trafficcapture.proxyserver.CaptureProxy; @@ -26,20 +27,24 @@ public class CaptureProxyContainer extends GenericContainer implements AutoClose private static final Duration TIMEOUT_DURATION = Duration.ofSeconds(30); private final Supplier destinationUriSupplier; 
private final Supplier kafkaUriSupplier; + private final List extraArgs; private Integer listeningPort; private Thread serverThread; - public CaptureProxyContainer( - final Supplier destinationUriSupplier, - final Supplier kafkaUriSupplier - ) { + public CaptureProxyContainer(final Supplier destinationUriSupplier, + final Supplier kafkaUriSupplier, + Stream extraArgs) { this.destinationUriSupplier = destinationUriSupplier; this.kafkaUriSupplier = kafkaUriSupplier; + this.extraArgs = extraArgs.collect(Collectors.toList()); + } + + public CaptureProxyContainer(Supplier destinationUriSupplier, Supplier kafkaUriSupplier) { + this(destinationUriSupplier, kafkaUriSupplier, Stream.of()); } public CaptureProxyContainer(final String destinationUri, final String kafkaUri) { - this.destinationUriSupplier = () -> destinationUri; - this.kafkaUriSupplier = () -> kafkaUri; + this(() -> destinationUri, () -> kafkaUri); } public CaptureProxyContainer(final Container destination, final KafkaContainer kafka) { @@ -74,6 +79,8 @@ public void start() { argsList.add(String.valueOf(listeningPort)); argsList.add("--insecureDestination"); + argsList.addAll(extraArgs); + CaptureProxy.main(argsList.toArray(new String[0])); } catch (Exception e) { throw new AssertionError("Should not have exception", e); diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java index a787f52b7..106601040 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java @@ -35,7 +35,7 @@ import org.opensearch.migrations.replay.http.retries.NoRetryEvaluatorFactory; import org.opensearch.migrations.replay.traffic.source.BufferedFlowController; import org.opensearch.migrations.replay.util.TextTrackedFuture; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.SimpleHttpClientForTesting; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; @@ -85,7 +85,7 @@ protected TestContext makeInstrumentationContext() { return TestContext.withTracking(false, true); } - private static SimpleHttpResponse makeResponseContext(HttpRequestFirstLine request) { + private static SimpleHttpResponse makeResponseContext(HttpRequest request) { var headers = new TreeMap( Map.of( "Content-Type", @@ -100,7 +100,7 @@ private static SimpleHttpResponse makeResponseContext(HttpRequestFirstLine reque return new SimpleHttpResponse(headers, payloadBytes, "OK", 200); } - private static SimpleHttpResponse makeResponseContextLarge(HttpRequestFirstLine request) { + private static SimpleHttpResponse makeResponseContextLarge(HttpRequest request) { var headers = new TreeMap( Map.of( "Content-Type", diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/SlowAndExpiredTrafficStreamBecomesTwoTargetChannelsTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/SlowAndExpiredTrafficStreamBecomesTwoTargetChannelsTest.java index c240858e4..c5ecf5d03 100644 --- 
a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/SlowAndExpiredTrafficStreamBecomesTwoTargetChannelsTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/SlowAndExpiredTrafficStreamBecomesTwoTargetChannelsTest.java @@ -27,7 +27,7 @@ import org.opensearch.migrations.replay.traffic.source.ArrayCursorTrafficCaptureSource; import org.opensearch.migrations.replay.traffic.source.ArrayCursorTrafficSourceContext; import org.opensearch.migrations.replay.traffic.source.BlockingTrafficSource; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.SimpleHttpResponse; import org.opensearch.migrations.testutils.SimpleNettyHttpServer; import org.opensearch.migrations.tracing.InMemoryInstrumentationBundle; @@ -172,7 +172,7 @@ static String makePath(String connection, int i) { return "/" + connection + "/" + Integer.toString(i); } - private static class TrackingResponseBuilder implements Function { + private static class TrackingResponseBuilder implements Function { List pathsReceivedList; CountDownLatch targetRequestsPending; @@ -182,8 +182,8 @@ public TrackingResponseBuilder(int expected) { } @Override - public SimpleHttpResponse apply(HttpRequestFirstLine firstLine) { - var pathReceived = firstLine.path().getPath(); + public SimpleHttpResponse apply(HttpRequest firstLine) { + var pathReceived = firstLine.getPath().getPath(); pathsReceivedList.add(pathReceived); var payloadBytes = pathReceived.getBytes(StandardCharsets.UTF_8); targetRequestsPending.countDown(); diff --git a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestHttpServerContext.java b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestHttpServerContext.java index 055463fea..00bdf7205 100644 --- a/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestHttpServerContext.java +++ b/TrafficCapture/trafficReplayer/src/testFixtures/java/org/opensearch/migrations/replay/TestHttpServerContext.java @@ -5,7 +5,7 @@ import java.util.Map; import java.util.Random; -import org.opensearch.migrations.testutils.HttpRequestFirstLine; +import org.opensearch.migrations.testutils.HttpRequest; import org.opensearch.migrations.testutils.SimpleHttpResponse; import lombok.Lombok; @@ -30,18 +30,18 @@ public static String getRequestStringForSimpleGet(String uriPath) { ); } - public static SimpleHttpResponse makeResponse(Random rand, HttpRequestFirstLine firstLine) { + public static SimpleHttpResponse makeResponse(Random rand, HttpRequest firstLine) { return makeResponse(firstLine, Duration.ofMillis(rand.nextInt(MAX_RESPONSE_TIME_MS))); } - public static SimpleHttpResponse makeResponse(HttpRequestFirstLine r, Duration responseWaitTime) { + public static SimpleHttpResponse makeResponse(HttpRequest r, Duration responseWaitTime) { try { Thread.sleep(responseWaitTime.toMillis()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw Lombok.sneakyThrow(e); } - String body = SERVER_RESPONSE_BODY_PREFIX + r.path(); + String body = SERVER_RESPONSE_BODY_PREFIX + r.getPath(); var payloadBytes = body.getBytes(StandardCharsets.UTF_8); var headers = Map.of( "Content-Type", diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpFirstLine.java 
b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpFirstLine.java deleted file mode 100644 index 7a6ad26d9..000000000 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpFirstLine.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.opensearch.migrations.testutils; - -import java.net.URI; - -public interface HttpFirstLine { - String verb(); - - URI path(); - - String version(); -} diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequest.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequest.java new file mode 100644 index 000000000..de487e162 --- /dev/null +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequest.java @@ -0,0 +1,15 @@ +package org.opensearch.migrations.testutils; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +public interface HttpRequest { + String getVerb(); + + URI getPath(); + + String getVersion(); + + List> getHeaders(); +} diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequestFirstLine.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequestFirstLine.java deleted file mode 100644 index fc944723d..000000000 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/HttpRequestFirstLine.java +++ /dev/null @@ -1,11 +0,0 @@ -package org.opensearch.migrations.testutils; - -import java.net.URI; - -public interface HttpRequestFirstLine { - String verb(); - - URI path(); - - String version(); -} diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleHttpServer.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleHttpServer.java index 5682ec71c..71c5b47d4 100644 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleHttpServer.java +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleHttpServer.java @@ -3,13 +3,19 @@ import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; +import java.util.AbstractMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import java.util.stream.Collectors; import com.sun.net.httpserver.HttpServer; import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsParameters; import com.sun.net.httpserver.HttpsServer; +import lombok.AllArgsConstructor; +import lombok.Getter; import lombok.Lombok; /** @@ -26,7 +32,7 @@ public class SimpleHttpServer implements AutoCloseable { public static SimpleHttpServer makeServer( boolean useTls, - Function makeContext + Function makeContext ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { var testServerRef = new AtomicReference(); PortFinder.retryWithNewPortUntilNoThrow(port -> { @@ -39,31 +45,13 @@ public static SimpleHttpServer makeServer( return testServerRef.get(); } - public static class PojoHttpRequestFirstLine implements HttpRequestFirstLine { + @Getter + @AllArgsConstructor + public static class PojoHttpRequest implements HttpRequest { private final String verb; private final URI path; private final String version; - - public PojoHttpRequestFirstLine(String verb, URI path, String version) { - this.verb = verb; - this.path = path; - this.version = version; - } - - @Override - public String 
verb() { - return verb; - } - - @Override - public URI path() { - return path; - } - - @Override - public String version() { - return version; - } + private final List> headers; } private static HttpsServer createSecureServer(InetSocketAddress address) throws Exception { @@ -93,18 +81,21 @@ public void configure(HttpsParameters params) { * @param port * @return the port upon successfully binding the server */ - public SimpleHttpServer(boolean useTls, int port, Function contentMapper) + public SimpleHttpServer(boolean useTls, int port, Function contentMapper) throws Exception { var addr = new InetSocketAddress(LOCALHOST, port); this.useTls = useTls; httpServer = useTls ? createSecureServer(addr) : HttpServer.create(addr, 0); httpServer.createContext("/", httpExchange -> { - var requestToMatch = new PojoHttpRequestFirstLine( + var requestToMatch = new PojoHttpRequest( httpExchange.getRequestMethod(), httpExchange.getRequestURI(), - httpExchange.getProtocol() - ); + httpExchange.getProtocol(), + httpExchange.getRequestHeaders().entrySet().stream() + .flatMap(keyValueList -> keyValueList.getValue().stream() + .map(v -> new AbstractMap.SimpleEntry<>(keyValueList.getKey(), v))) + .collect(Collectors.toList())); var headersAndPayload = contentMapper.apply(requestToMatch); var responseHeaders = httpExchange.getResponseHeaders(); for (var kvp : headersAndPayload.headers.entrySet()) { diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java index f0ce2028a..9713e4676 100644 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java @@ -3,6 +3,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.time.Duration; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -56,7 +57,7 @@ public class SimpleNettyHttpServer implements AutoCloseable { public static SimpleNettyHttpServer makeServer( boolean useTls, - Function makeContext + Function makeContext ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { return makeServer(useTls, null, makeContext); } @@ -64,7 +65,7 @@ public static SimpleNettyHttpServer makeServer( public static SimpleNettyHttpServer makeServer( boolean useTls, Duration readTimeout, - Function makeContext + Function makeContext ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { var testServerRef = new AtomicReference(); PortFinder.retryWithNewPortUntilNoThrow(port -> { @@ -77,28 +78,31 @@ public static SimpleNettyHttpServer makeServer( return testServerRef.get(); } - private static class RequestToFirstLineAdapter implements HttpRequestFirstLine { + public static class RequestToAdapter implements HttpRequest { private final FullHttpRequest request; - public RequestToFirstLineAdapter(FullHttpRequest request) { + public RequestToAdapter(FullHttpRequest request) { this.request = request; } @Override - public String verb() { + public String getVerb() { return request.method().toString(); } @SneakyThrows @Override - public URI path() { + public URI getPath() { return new URI(request.uri()); } @Override - public String version() { + public String getVersion() { return request.protocolVersion().text(); } + + @Override + public List> getHeaders() { 
return request.headers().entries(); } } HttpHeaders convertHeaders(Map headers) { @@ -108,7 +112,7 @@ HttpHeaders convertHeaders(Map headers) { } private SimpleChannelInboundHandler makeHandlerFromResponseContext( - Function responseBuilder + Function responseBuilder ) { return new SimpleChannelInboundHandler<>() { @Override @@ -118,7 +122,7 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) { ctx.close(); return; } - var specifiedResponse = responseBuilder.apply(new RequestToFirstLineAdapter(req)); + var specifiedResponse = responseBuilder.apply(new RequestToAdapter(req)); var fullResponse = new DefaultFullHttpResponse( HttpVersion.HTTP_1_1, HttpResponseStatus.valueOf(specifiedResponse.statusCode, specifiedResponse.statusText), @@ -146,7 +150,7 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) { boolean useTLS, int port, Duration timeout, - Function responseBuilder + Function responseBuilder ) throws Exception { this.useTls = useTLS; this.port = port; From 27ba9d16e2ca97c27f998a22d759e6d571e0ed2d Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Thu, 12 Sep 2024 15:17:24 -0400 Subject: [PATCH 17/38] Mark some more tests with the isolatedTest tag. In one case, the test already had an "Isolated" tag explaining that there are checks dependent upon time (i.e. expirations). For another, mark the DocumentMigration EndToEndTest (which is different than the MetadataMigration test of the same name) as isolated. I've also bumped a timeout up on a network test. Signed-off-by: Greg Schohn --- .../src/test/java/com/rfs/EndToEndTest.java | 2 ++ TrafficCapture/build.gradle | 20 ++++++++++--------- .../ExpiringSubstitutableItemPoolTest.java | 2 ++ .../NettyPacketToHttpConsumerTest.java | 2 +- 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/EndToEndTest.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/EndToEndTest.java index 7f338bab7..85aa305fd 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/EndToEndTest.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/EndToEndTest.java @@ -6,6 +6,7 @@ import java.util.concurrent.CompletableFuture; import org.hamcrest.Matchers; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.io.TempDir; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ArgumentsSource; @@ -27,6 +28,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +@Tag("isolatedTest") public class EndToEndTest extends SourceTestBase { @TempDir private File localDirectory; diff --git a/TrafficCapture/build.gradle b/TrafficCapture/build.gradle index 7172ea770..5b3943eeb 100644 --- a/TrafficCapture/build.gradle +++ b/TrafficCapture/build.gradle @@ -33,15 +33,17 @@ allprojects { } } -tasks.named('test', Test) { - // Memory leak tests are adding too much execution time on `test` in TrafficCapture - // Disabling and will test in `slowTest` - systemProperty 'disableMemoryLeakTests', 'true' -} +subprojects { + tasks.named('test', Test) { + // Memory leak tests are adding too much execution time on `test` in TrafficCapture + // Disabling and will test in `slowTest` + it.systemProperty 'disableMemoryLeakTests', 'true' + } -tasks.named('slowTest', Test) { - useJUnitPlatform { - // Ensure rerunning all tests to run with leak detection - includeTags = [] + tasks.named('slowTest', Test) { + useJUnitPlatform { + includeTags = [] + excludeTags = 
['isolatedTest']
+        }
+    }
 }

diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
index b3169cc60..15d038ac3 100644
--- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
+++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java
@@ -13,6 +13,7 @@ import java.util.stream.IntStream;
 
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.parallel.Isolated;
 
@@ -25,6 +26,7 @@
 @Slf4j
 @Isolated("Isolation based on temporal checks")
+@Tag("isolatedTest")
 class ExpiringSubstitutableItemPoolTest {
 
     public static final int NUM_POOLED_ITEMS = 5;
diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java
index 6ad0aa168..352fc23d1 100644
--- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java
+++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/datahandlers/NettyPacketToHttpConsumerTest.java
@@ -440,7 +440,7 @@ public void testResponseTakesLongerThanTimeout(boolean useTls) throws Exception
             Instant.now(),
             Instant.now(),
             () -> Stream.of(EXPECTED_REQUEST_STRING.getBytes(StandardCharsets.UTF_8)));
-        var maxTimeToWaitForTimeoutOrResponse = Duration.ofSeconds(10);
+        var maxTimeToWaitForTimeoutOrResponse = Duration.ofSeconds(30);
         var aggregatedResponse = requestFinishFuture.get(maxTimeToWaitForTimeoutOrResponse);
         log.atInfo().setMessage("RequestFinishFuture finished").log();
         Assertions.assertInstanceOf(ReadTimeoutException.class, aggregatedResponse.getError());

From 468268904a5adf1cef2c14ebc54c80ecebaaf124 Mon Sep 17 00:00:00 2001
From: Andre Kurait
Date: Mon, 16 Sep 2024 23:46:23 -0500
Subject: [PATCH 18/38] Add intelligentTiering for logs EFS

Signed-off-by: Andre Kurait
---
 .../lib/migration-assistance-stack.ts | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts b/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
index c47524fd4..a9a0042df 100644
--- a/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
+++ b/deployment/cdk/opensearch-service-migration/lib/migration-assistance-stack.ts
@@ -1,6 +1,6 @@
 import {RemovalPolicy, Stack} from "aws-cdk-lib";
 import {IVpc, Port, SecurityGroup, SubnetFilter, SubnetType} from "aws-cdk-lib/aws-ec2";
-import {FileSystem} from 'aws-cdk-lib/aws-efs';
+import {FileSystem, LifecyclePolicy, ThroughputMode} from 'aws-cdk-lib/aws-efs';
 import {Construct} from "constructs";
 import {CfnConfiguration} from "aws-cdk-lib/aws-msk";
 import {Cluster} from "aws-cdk-lib/aws-ecs";
@@ -8,7 +8,11 @@ import {StackPropsExt} from "./stack-composer";
 import {LogGroup, RetentionDays} from "aws-cdk-lib/aws-logs";
 import {StreamingSourceType} from "./streaming-source-type";
 import {Bucket,
BucketEncryption} from "aws-cdk-lib/aws-s3"; -import {createMigrationStringParameter, MigrationSSMParameter, parseRemovalPolicy} from "./common-utilities"; +import { + createMigrationStringParameter, + MigrationSSMParameter, + parseRemovalPolicy +} from "./common-utilities"; import { ClientAuthentication, ClientBrokerEncryption, @@ -188,7 +192,9 @@ export class MigrationAssistanceStack extends Stack { const sharedLogsEFS = new FileSystem(this, 'sharedLogsEFS', { vpc: props.vpc, securityGroup: sharedLogsSG, - removalPolicy: replayerEFSRemovalPolicy + removalPolicy: replayerEFSRemovalPolicy, + lifecyclePolicy: LifecyclePolicy.AFTER_1_DAY, // Cost break even is at 26 downloads / month + throughputMode: ThroughputMode.BURSTING, // Best cost characteristics for write heavy, short-lived data }); createMigrationStringParameter(this, sharedLogsEFS.fileSystemId, { ...props, From 74dd2f7c9637d02067993505d7a50eb6900a063c Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 11:53:41 -0400 Subject: [PATCH 19/38] Proxy header translations: PR feedback and a critical bugfix. The bugfix was that the state machine for striking header lines would continue even AFTER the header was parsed. That meant that if a header name ever appeared at the beginning of a line within the contents of a payload, we'd strike that whole line. A test was added to exhibit the behavior, requiring some refactoring to the SimpleNettyHttpServer to get the payload that hits that server. That's used to confirm that we're leaving the body exactly intact in the new test. All other previous tests continue to pass. Signed-off-by: Greg Schohn --- .../proxyserver/CaptureProxy.java | 42 +++++++++--- .../proxyserver/netty/HeaderAdderHandler.java | 9 ++- .../netty/HeaderRemoverHandler.java | 44 ++++++++++-- .../proxyserver/TestHeaderRewrites.java | 68 +++++++++++++++++-- .../netty/HeaderRemoverHandlerTest.java | 8 +++ .../testutils/SimpleNettyHttpServer.java | 36 ++++++++-- 6 files changed, 178 insertions(+), 29 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index c73b060bf..0d891910e 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -9,11 +9,12 @@ import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; -import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; @@ -52,6 +53,7 @@ import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; +import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.socket.SocketChannel; import io.netty.handler.ssl.SslContext; @@ -348,7 +350,7 @@ protected static Map convertPairListToMap(List list) { if (list == null) { return Map.of(); } - var map = new TreeMap(); + var map = new LinkedHashMap(); for (int i = 0; i < list.size(); i += 2) { map.put(list.get(i), list.get(i + 1)); } @@ -425,10 
+427,22 @@ static ProxyChannelInitializer buildProxyChannelInitializer(RootCaptureContext r BacksideConnectionPool backsideConnectionPool, Supplier sslEngineSupplier, @NonNull RequestCapturePredicate headerCapturePredicate, - List headerOverrides, + List headerOverridesArgs, IConnectionCaptureFactory connectionFactory) { - var headers = convertPairListToMap(headerOverrides); + var headers = new ArrayList<>(convertPairListToMap(headerOverridesArgs).entrySet()); + Collections.reverse(headers); + final var removeStrings = new ArrayList(headers.size()); + final var addBufs = new ArrayList(headers.size()); + + for (var kvp : headers) { + addBufs.add( + Unpooled.unreleasableBuffer( + Unpooled.wrappedBuffer( + (kvp.getKey() + ": " + kvp.getValue()).getBytes(StandardCharsets.UTF_8)))); + removeStrings.add(kvp.getKey() + ":"); + } + return new ProxyChannelInitializer( rootContext, backsideConnectionPool, @@ -439,14 +453,20 @@ static ProxyChannelInitializer buildProxyChannelInitializer(RootCaptureContext r @Override protected void initChannel(@NonNull SocketChannel ch) throws IOException { super.initChannel(ch); - for (var kvp : headers.entrySet()) { - var lineBytes = (kvp.getKey() + ": " + kvp.getValue()).getBytes(StandardCharsets.UTF_8); - ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "AddHeader-" + kvp.getKey(), - new HeaderAdderHandler(Unpooled.unreleasableBuffer(Unpooled.wrappedBuffer(lineBytes)))); + final var pipeline = ch.pipeline(); + { + int i = 0; + for (var kvp : headers) { + pipeline.addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "AddHeader-" + kvp.getKey(), + new HeaderAdderHandler(addBufs.get(i++))); + } } - for (var k : headers.keySet()) { - ch.pipeline().addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "RemoveHeader-" + k, - new HeaderRemoverHandler(k + ":")); + { + int i = 0; + for (var kvp : headers) { + pipeline.addAfter(ProxyChannelInitializer.CAPTURE_HANDLER_NAME, "RemoveHeader-" + kvp.getKey(), + new HeaderRemoverHandler(removeStrings.get(i++))); + } } } }; diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java index 147684898..e4bf3d528 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java @@ -6,6 +6,7 @@ import io.netty.buffer.Unpooled; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.util.ReferenceCountUtil; public class HeaderAdderHandler extends ChannelInboundHandlerAdapter { private static final ByteBuf CRLF_BYTE_BUF = @@ -37,7 +38,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var composite = Unpooled.compositeBuffer(4); buf.resetReaderIndex(); composite.addComponent(true, buf.retainedSlice(0, upToIndex)); - composite.addComponent(true, headerLineToAdd.duplicate()); + composite.addComponent(true, headerLineToAdd.retainedDuplicate()); composite.addComponent(true, (useCarriageReturn ? 
CRLF_BYTE_BUF : LF_BYTE_BUF).duplicate()); composite.addComponent(true, buf.retainedSlice(upToIndex, buf.readableBytes()-upToIndex)); buf.release(); @@ -49,4 +50,10 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception buf.resetReaderIndex(); super.channelRead(ctx, msg); } + + @Override + public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { + ReferenceCountUtil.release(headerLineToAdd); + super.channelUnregistered(ctx); + } } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java index 56accf96b..1edf7afbe 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java @@ -16,6 +16,11 @@ public class HeaderRemoverHandler extends ChannelInboundHandlerAdapter { // when dropUntilNewline == true, we're dropping, otherwise, we're copying (when previousRemaining==null) // The starting state is previousRemaining == null and dropUntilNewline = false boolean dropUntilNewline; + MessagePosition requestPosition = MessagePosition.IN_HEADER; + + private enum MessagePosition { + IN_HEADER, ONE_NEW_LINE, AFTER_HEADERS, + } public HeaderRemoverHandler(String headerToRemove) { if (!headerToRemove.endsWith(":")) { @@ -49,10 +54,7 @@ boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { } buf.markReaderIndex(); if (Character.toLowerCase(headerToRemove.charAt(i)) != Character.toLowerCase(buf.readByte())) { // no match - previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb.retain())); - previousRemaining.removeComponents(0, previousRemaining.numComponents()); - previousRemaining.release(); - previousRemaining = null; + flushAndClearPreviousRemaining(ctx); buf.resetReaderIndex(); dropUntilNewline = false; return false; @@ -60,6 +62,13 @@ boolean matchNextBytes(ChannelHandlerContext ctx, ByteBuf buf) { } } + void flushAndClearPreviousRemaining(ChannelHandlerContext ctx) { + previousRemaining.forEach(bb -> lambdaSafeSuperChannelRead(ctx, bb.retain())); + previousRemaining.removeComponents(0, previousRemaining.numComponents()); + previousRemaining.release(); + previousRemaining = null; + } + boolean advanceByteBufUntilNewline(ByteBuf bb) { while (bb.isReadable()) { // sonar lint doesn't like if the while statement has an empty body if (bb.readByte() == '\n') { return true; } @@ -81,16 +90,36 @@ CompositeByteBuf addSliceToComposite(ChannelHandlerContext ctx, CompositeByteBuf @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - if (!(msg instanceof ByteBuf)) { + if (!(msg instanceof ByteBuf) || requestPosition == MessagePosition.AFTER_HEADERS) { super.channelRead(ctx, msg); return; } var sourceBuf = (ByteBuf) msg; - var currentSourceSegmentStart = (previousRemaining != null || dropUntilNewline) ? -1 : sourceBuf.readerIndex(); + var currentSourceSegmentStart = + (previousRemaining != null || dropUntilNewline || requestPosition == MessagePosition.ONE_NEW_LINE) + ? 
-1 : sourceBuf.readerIndex(); CompositeByteBuf cleanedIncomingBuf = null; + sourceBuf.markReaderIndex(); while (sourceBuf.isReadable()) { + if (requestPosition == MessagePosition.ONE_NEW_LINE) { + final var nextByte = sourceBuf.readByte(); + if (nextByte == '\n' || nextByte == '\r') { + requestPosition = MessagePosition.AFTER_HEADERS; + if (currentSourceSegmentStart == -1) { + currentSourceSegmentStart = sourceBuf.readerIndex() - 1; + } + sourceBuf.readerIndex(sourceBuf.writerIndex()); + break; + } else { + previousRemaining = ctx.alloc().compositeBuffer(16); + requestPosition = MessagePosition.IN_HEADER; + sourceBuf.resetReaderIndex(); + continue; + } + } + if (previousRemaining != null) { final var sourceReaderIdx = sourceBuf.readerIndex(); if (matchNextBytes(ctx, sourceBuf)) { @@ -106,7 +135,8 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception } } else { if (advanceByteBufUntilNewline(sourceBuf)) { - previousRemaining = ctx.alloc().compositeBuffer(16); + sourceBuf.markReaderIndex(); + requestPosition = MessagePosition.ONE_NEW_LINE; } else { break; } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java index b45039166..a110a3362 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/TestHeaderRewrites.java @@ -1,5 +1,6 @@ package org.opensearch.migrations.trafficcapture.proxyserver; +import java.io.ByteArrayInputStream; import java.net.URI; import java.nio.charset.StandardCharsets; import java.time.Duration; @@ -22,8 +23,15 @@ @Slf4j public class TestHeaderRewrites { + + public static final String ONLY_FOR_HEADERS_VALUE = "this is only for headers"; +public static final String BODY_WITH_HEADERS_CONTENTS = "\n" + + "body: should stay\n" + + "body: untouched\n" + + "body:\n"; + @Test - public void testRewrites() throws Exception { + public void testHeaderRewrites() throws Exception { final var payloadBytes = "Success".getBytes(StandardCharsets.UTF_8); final var headers = Map.of( "Content-Type", @@ -44,7 +52,7 @@ public void testRewrites() throws Exception { Duration.ofMinutes(10), fl -> { capturedRequestList.add(fl); - log.error("headers: " + fl.getHeaders().stream().map(kvp->kvp.getKey()+": "+kvp.getValue()) + log.trace("headers: " + fl.getHeaders().stream().map(kvp->kvp.getKey()+": "+kvp.getValue()) .collect(Collectors.joining())); return new SimpleHttpResponse(headers, payloadBytes, "OK", 200); }); @@ -55,16 +63,68 @@ public void testRewrites() throws Exception { proxy.start(); final var proxyEndpoint = CaptureProxyContainer.getUriFromContainer(proxy); - var allHeaders = new LinkedHashMap(); allHeaders.put("Host", "localhost"); allHeaders.put("User-Agent", "UnitTest"); var response = client.makeGetRequest(new URI(proxyEndpoint), allHeaders.entrySet().stream()); - log.error("response=" + response); var capturedRequest = capturedRequestList.get(capturedRequestList.size()-1).getHeaders().stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Assertions.assertEquals("localhost", capturedRequest.get("host")); Assertions.assertEquals("insignificant value", capturedRequest.get("X-new-header")); } } + + @Test + public 
void testBodyDoesntRewrite() throws Exception { + final var payloadBytes = "Success".getBytes(StandardCharsets.UTF_8); + final var headers = Map.of( + "Content-Type", + "text/plain", + "Content-Length", + "" + payloadBytes.length + ); + var rewriteArgs = List.of( + "--setHeader", + "host", + "localhost", + "--setHeader", + "body", + ONLY_FOR_HEADERS_VALUE + ); + var capturedRequestList = new ArrayList(); + var capturedBodies = new ArrayList(); + try (var destinationServer = SimpleNettyHttpServer.makeNettyServer(false, + Duration.ofMinutes(10), + fullRequest -> { + var request = new SimpleNettyHttpServer.RequestToAdapter(fullRequest); + capturedRequestList.add(request); + log.atTrace().setMessage(() -> "headers: " + + request.getHeaders().stream().map(kvp->kvp.getKey()+": "+kvp.getValue()) + .collect(Collectors.joining())).log(); + capturedBodies.add(fullRequest.content().toString(StandardCharsets.UTF_8)); + return new SimpleHttpResponse(headers, payloadBytes, "OK", 200); + }); + var proxy = new CaptureProxyContainer(() -> destinationServer.localhostEndpoint().toString(), null, + rewriteArgs.stream()); + var client = new SimpleHttpClientForTesting(); + var bodyStream = new ByteArrayInputStream(BODY_WITH_HEADERS_CONTENTS.getBytes(StandardCharsets.UTF_8))) + { + proxy.start(); + final var proxyEndpoint = CaptureProxyContainer.getUriFromContainer(proxy); + + var allHeaders = new LinkedHashMap(); + allHeaders.put("Host", "localhost"); + allHeaders.put("User-Agent", "UnitTest"); + var response = client.makePutRequest(new URI(proxyEndpoint), allHeaders.entrySet().stream(), + new SimpleHttpClientForTesting.PayloadAndContentType(bodyStream, "text/plain")); + log.error("response=" + response); + var capturedRequest = capturedRequestList.get(capturedRequestList.size()-1).getHeaders().stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + Assertions.assertEquals("localhost", capturedRequest.get("host")); + Assertions.assertEquals(ONLY_FOR_HEADERS_VALUE, capturedRequest.get("body")); + + var lastBody = capturedBodies.get(capturedBodies.size()-1); + Assertions.assertEquals(BODY_WITH_HEADERS_CONTENTS, lastBody); + } + } } diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java index d1011e7d7..133057c06 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandlerTest.java @@ -49,6 +49,12 @@ public void runTestWithSize(Function messageMaker, IntStream siz outputBuf.release(); } + @Test + public void newlinesArePreserved() { + runTestsWithSize((b,s) -> "GET / HTTP/1.1\r\n" + (b ? 
"host: localhost\r\n" : "") + "\r\n", + () -> IntStream.of(Integer.MAX_VALUE)); + } + @Test public void throwsOnHostFormatError() { Assertions.assertThrows(IllegalArgumentException.class, () -> new HeaderRemoverHandler("host")); @@ -87,6 +93,7 @@ public void randomFragmentedCheckInterlaced() { final var bound = getBound(HeaderRemoverHandlerTest::makeInterlacedMessage); for (int i=0; i "random run={}").addArgument(i).log(); runTestsWithSize(HeaderRemoverHandlerTest::makeInterlacedMessage, () -> IntStream.generate(() -> r.nextInt(bound))); } @@ -107,6 +114,7 @@ public void randomFragmentedCheckConsecutive() { final var bound = getBound(HeaderRemoverHandlerTest::makeConsecutiveMessage); for (int i=0; i "random run={}").addArgument(i).log(); runTestsWithSize(HeaderRemoverHandlerTest::makeConsecutiveMessage, () -> IntStream.generate(() -> r.nextInt(bound))); } diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java index 9713e4676..41c2fc260 100644 --- a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java +++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/testutils/SimpleNettyHttpServer.java @@ -30,6 +30,7 @@ import io.netty.handler.codec.http.HttpResponseEncoder; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.logging.LoggingHandler; import io.netty.handler.ssl.SslHandler; import io.netty.handler.timeout.ReadTimeoutHandler; import io.netty.util.concurrent.DefaultThreadFactory; @@ -59,13 +60,28 @@ public static SimpleNettyHttpServer makeServer( boolean useTls, Function makeContext ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { - return makeServer(useTls, null, makeContext); + return makeNettyServer(useTls, null, r -> makeContext.apply(new RequestToAdapter(r))); + } + + public static SimpleNettyHttpServer makeNettyServer( + boolean useTls, + Function makeContext + ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { + return makeNettyServer(useTls, null, makeContext); } public static SimpleNettyHttpServer makeServer( boolean useTls, Duration readTimeout, Function makeContext + ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { + return makeNettyServer(useTls, readTimeout, r -> makeContext.apply(new RequestToAdapter(r))); + } + + public static SimpleNettyHttpServer makeNettyServer( + boolean useTls, + Duration readTimeout, + Function makeContext ) throws PortFinder.ExceededMaxPortAssigmentAttemptException { var testServerRef = new AtomicReference(); PortFinder.retryWithNewPortUntilNoThrow(port -> { @@ -112,8 +128,13 @@ HttpHeaders convertHeaders(Map headers) { } private SimpleChannelInboundHandler makeHandlerFromResponseContext( - Function responseBuilder - ) { + Function responseBuilder) { + return makeHandlerFromNettyResponseContext(r -> responseBuilder.apply(new RequestToAdapter(r))); + } + + private SimpleChannelInboundHandler makeHandlerFromNettyResponseContext( + Function responseBuilder) + { return new SimpleChannelInboundHandler<>() { @Override protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) { @@ -122,7 +143,7 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) { ctx.close(); return; } - var specifiedResponse = responseBuilder.apply(new RequestToAdapter(req)); + var 
specifiedResponse = responseBuilder.apply(req); var fullResponse = new DefaultFullHttpResponse( HttpVersion.HTTP_1_1, HttpResponseStatus.valueOf(specifiedResponse.statusCode, specifiedResponse.statusText), @@ -150,7 +171,7 @@ protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) { boolean useTLS, int port, Duration timeout, - Function responseBuilder + Function responseBuilder ) throws Exception { this.useTls = useTLS; this.port = port; @@ -172,10 +193,13 @@ protected void initChannel(SocketChannel ch) { if (timeout != null) { pipeline.addLast(new ReadTimeoutHandler(timeout.toMillis(), TimeUnit.MILLISECONDS)); } + pipeline.addLast(new LoggingHandler("A")); pipeline.addLast(new HttpRequestDecoder()); + pipeline.addLast(new LoggingHandler("B")); pipeline.addLast(new HttpObjectAggregator(16 * 1024)); + pipeline.addLast(new LoggingHandler("C")); pipeline.addLast(new HttpResponseEncoder()); - pipeline.addLast(makeHandlerFromResponseContext(responseBuilder)); + pipeline.addLast(makeHandlerFromNettyResponseContext(responseBuilder)); } }); serverChannel = b.bind(port).sync().channel(); From 511568eeb3f64b3e7e98be07a2265d6d11fb79c9 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 12:04:39 -0400 Subject: [PATCH 20/38] Proxy header translation optimization. Create a CompositeByteBuf only when there are 2 or more ByteBufs that need to be treated as one. Otherwise just use a single buffer. Signed-off-by: Greg Schohn --- .../netty/HeaderRemoverHandler.java | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java index 1edf7afbe..ec981cce1 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderRemoverHandler.java @@ -76,16 +76,24 @@ boolean advanceByteBufUntilNewline(ByteBuf bb) { return false; } - CompositeByteBuf addSliceToComposite(ChannelHandlerContext ctx, CompositeByteBuf priorBuf, ByteBuf sourceBuf, - int start, int len) { + ByteBuf addSliceToRunningBuf(ChannelHandlerContext ctx, ByteBuf priorBuf, ByteBuf sourceBuf, + int start, int len) { if (len == 0) { return priorBuf; } + var slicedSourceBuf = sourceBuf.retainedSlice(start, len); if (priorBuf == null) { - priorBuf = ctx.alloc().compositeBuffer(4); + return slicedSourceBuf; } - priorBuf.addComponent(true, sourceBuf.retainedSlice(start, len)); - return priorBuf; + CompositeByteBuf cbb; + if (!(priorBuf instanceof CompositeByteBuf)) { + cbb = ctx.alloc().compositeBuffer(4); + cbb.addComponent(true, priorBuf); + } else { + cbb = (CompositeByteBuf) priorBuf; + } + cbb.addComponent(true, slicedSourceBuf); + return cbb; } @Override @@ -99,7 +107,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception var currentSourceSegmentStart = (previousRemaining != null || dropUntilNewline || requestPosition == MessagePosition.ONE_NEW_LINE) ? 
-1 : sourceBuf.readerIndex(); - CompositeByteBuf cleanedIncomingBuf = null; + ByteBuf cleanedIncomingBuf = null; sourceBuf.markReaderIndex(); while (sourceBuf.isReadable()) { @@ -126,7 +134,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (currentSourceSegmentStart >= 0 && sourceReaderIdx != currentSourceSegmentStart) // would be 0-length { - cleanedIncomingBuf = addSliceToComposite(ctx, cleanedIncomingBuf, sourceBuf, + cleanedIncomingBuf = addSliceToRunningBuf(ctx, cleanedIncomingBuf, sourceBuf, currentSourceSegmentStart, sourceReaderIdx-currentSourceSegmentStart); currentSourceSegmentStart = -1; } @@ -144,7 +152,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception } if (currentSourceSegmentStart >= 0) { - cleanedIncomingBuf = addSliceToComposite(ctx, cleanedIncomingBuf, sourceBuf, + cleanedIncomingBuf = addSliceToRunningBuf(ctx, cleanedIncomingBuf, sourceBuf, currentSourceSegmentStart, sourceBuf.readerIndex()-currentSourceSegmentStart); } sourceBuf.release(); From 946d80b864e8cc51db32c46e6441f4ca375766de Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 13:37:15 -0400 Subject: [PATCH 21/38] Add TESTING.md to give developers some understanding of how testing works w/ gradle. Signed-off-by: Greg Schohn --- TESTING.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 TESTING.md diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 000000000..eaadb87ea --- /dev/null +++ b/TESTING.md @@ -0,0 +1,28 @@ +## Gradle Usage + +[Gradle](https://gradle.org/) is used to build this repo, including its java artifacts as well as docker images. Gradle handles dependencies between projects, compiling java code, running [JUnit](https://junit.org/junit5/) tests, and building docker images. It can also deploy a demo/test environment via docker-compose for a rapid development experience (see [dockerSolution](./TrafficCapture/dockerSolution/README.md)). + +The Gradle application is packaged within the repository, so one can simply run [gradlew](./gradlew) from the root of the repository. `./gradlew tasks` will show the tasks available at the top-level. `.../gradlew tasks` run in any subproject directory will show specific tasks that can be run for that project. Gradle can publish a scan to `scans.gradle.com` of its logs, performance, etc. at the end of its run, which can be used to diagnose a number of issues from test failures to build performance. + +This `OpensearchMigrations` Gradle project is composed of many subprojects, defined by [settings.gradle](settings.gradle). Those projects are configured similarly in the [build.gradle](./build.gradle) file. Additional settings are defined in the [gradle.properties](./gradle.properties) file. + +## Tests and Parallelization + +Gradle is configured to run most tasks from the projects in parallel, with a special exemption for tests that are marked to run in total isolation from anything else that the gradle parent process is doing. Tasks that can be run in parallel include building targets and running tests. Notice that typical dependency rules apply. The number of tasks running concurrently will be limited by the maxWorkerCount that gradle is passed or configures (which is typically the # of CPUs). + +Each subproject has the same base test configuration. Targets include `test`, `slowTest`, `isolatedTest`, and `fullTest`, which are defined within the root `build.gradle` file.
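+As a minimal, hypothetical sketch (the class and method names below are invented for illustration and do not exist in this repo), a test opts into one of the non-default targets with a JUnit 5 tag:
+
+```java
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+
+class ExampleTaggingTest {
+
+    @Test
+    void runsOnEveryBuild() {
+        // untagged, so the default `test` target picks this up
+    }
+
+    @Test
+    @Tag("longTest") // collected by the `slowTest` target instead of `test`
+    void takesMinutesButIsParallelSafe() {
+        // slower, integration-style checks
+    }
+
+    @Test
+    @Tag("isolatedTest") // collected by the `isolatedTest` target and run serially
+    void needsExclusiveResources() {
+        // timing- or resource-sensitive assertions
+    }
+}
+```
+
+Class-level tags behave the same way and apply to every test method in the class.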
+Tests are assigned to those targets via the `@Tag("NAME")` annotation on each test (class or method), as sketched above. A project's `test` tasks will be run as part of its `build` task. + +A summary of each project's target composition is as follows. Notice that `allTests` exists at the top-level project to depend upon ALL of the test tasks across all of the projects. + +| Target | Composition | Purpose | +|---|---|---| +|`slowTest`| `@Tag("longTest")`| Tests that are too slow to provide value for every build run; they may skew more toward integration tests and take seconds to minutes, but they shouldn't require exhaustive use of resources or be sensitive to other tasks running in parallel | +|`isolatedTest`| `@Tag("isolatedTest")`| Tests that require significant or exclusive use of resources or have sensitive performance bounds | +|`test`| all other tests NOT marked with the tags above ("longTest" or "isolatedTest")| Quick tests that are run as part of every build | +|`fullTest`| a task dependent upon the tasks above | Convenience task | + +The `isolatedTest` task (for each project) will run each of the tagged tests in serial, and the isolatedTest task itself runs in serial with respect to all of the other tasks within the project. While the `isolatedTest` task isn't marked as dependent upon the other tests, it is marked to run _after_ other tests if gradle is set to run them. That eliminates the requirement that test or slowTest run BEFORE the isolatedTest target when a developer is trying to only run the isolatedTest target. Likewise, `slowTest` isn't dependent upon `test`, but those two targets may run in parallel since there aren't isolation requirements. Parallelization for the test runners IS configured for the `test` and `slowTest` targets so that those tests may complete more quickly on hardware with more capacity. + +## Traffic Capture Memory Leak Detections + +TrafficCapture overrides the test and slowTest targets to enable netty leak detection only for slowTest. The regular test targets for the TrafficCapture subprojects set an environment variable to disable leak detection, and the `slowTest` target for those subprojects leaves the `disableMemoryLeakTests` variable unset but alters the tag definition to include all tests except those tagged with isolatedTest. From 132519133c853980975b8151f297b7c8832997d2 Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 14:26:03 -0400 Subject: [PATCH 22/38] Fix some typos that had misclassified the Full Replayer tests as not being slow tests. TODO: rename the tags and gradle targets to just be the same.
Signed-off-by: Greg Schohn --- .../migrations/replay/e2etests/FullTrafficReplayerTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java index b495293d5..eb31a7762 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/e2etests/FullTrafficReplayerTest.java @@ -234,7 +234,7 @@ public void testSingleStreamWithCloseIsCommitted() throws Throwable { @Test @ResourceLock("TrafficReplayerRunner") - @Tag("slowTest") + @Tag("longTest") public void fullTestWithThrottledStart() throws Throwable { var random = new Random(1); try ( @@ -357,7 +357,7 @@ public void makeSureThatCollateralDamageDoesntFreezeTests() throws Throwable { @ParameterizedTest @CsvSource(value = { "3,false", "-1,false", "3,true", "-1,true", }) - @Tag("slowTest") + @Tag("longTest") @ResourceLock("TrafficReplayerRunner") public void fullTestWithRestarts(int testSize, boolean randomize) throws Throwable { From 05d5d07205c0b07f9e550fea185ca6f5bccf13ee Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 14:28:14 -0400 Subject: [PATCH 23/38] Minor code cleanup Signed-off-by: Greg Schohn --- .../proxyserver/netty/ExpiringSubstitutableItemPoolTest.java | 2 -- gradle.properties | 1 - 2 files changed, 3 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java index 15d038ac3..b4e11dae3 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/ExpiringSubstitutableItemPoolTest.java @@ -15,7 +15,6 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.parallel.Isolated; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.concurrent.DefaultPromise; @@ -25,7 +24,6 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -@Isolated("Isolation based on temporal checks") @Tag("isolatedTest") class ExpiringSubstitutableItemPoolTest { diff --git a/gradle.properties b/gradle.properties index a7d29129d..dd1bb1d07 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,6 +1,5 @@ org.gradle.caching=true org.gradle.configuration-cache=true -#org.gradle.configureondemand=true # Set Gradle Daemon's idle timeout to 30 minutes org.gradle.daemon.idletimeout=1800000 org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 From b465c4a8ea16a5bb193102446048ca8408a00d40 Mon Sep 17 00:00:00 2001 From: Peter Nied Date: Tue, 17 Sep 2024 14:36:55 -0500 Subject: [PATCH 24/38] Add a welcome message for the migration console (#958) This fixes a documentation TODO so that users know when they have entered the migration console:
https://github.com/opensearch-project/opensearch-migrations/wiki/Accessing-the-Migration-Console Signed-off-by: Peter Nied --- .../dockerSolution/src/main/docker/migrationConsole/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile index c35b7de6d..d2c5b2c9e 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/Dockerfile @@ -56,7 +56,8 @@ WORKDIR /root RUN echo '. /.venv/bin/activate' >> /etc/profile.d/venv.sh RUN dnf install -y bash-completion RUN echo '. /etc/profile.d/bash_completion.sh' >> ~/.bashrc && \ - echo '. /etc/profile.d/venv.sh' >> ~/.bashrc + echo '. /etc/profile.d/venv.sh' >> ~/.bashrc && \ + echo 'echo Welcome to the Migration Assistant Console' >> ~/.bashrc # Set ENV to control startup script in /bin/sh mode ENV ENV=/root/.bashrc From ab00eddf1be8266d393e8d3b727bf4d089b955fb Mon Sep 17 00:00:00 2001 From: Mikayla Thompson Date: Tue, 17 Sep 2024 13:50:19 -0600 Subject: [PATCH 25/38] Add cluster version to yaml & rfs command (#960) * add cluster version to yaml & rfs command Signed-off-by: Mikayla Thompson * Add test and documentation Signed-off-by: Mikayla Thompson * Review comments on documentation Signed-off-by: Mikayla Thompson --------- Signed-off-by: Mikayla Thompson --- .../lib/console_link/README.md | 2 ++ .../console_link/models/cluster.py | 2 ++ .../console_link/models/metadata.py | 7 +++++- .../lib/console_link/tests/test_metadata.py | 24 +++++++++++++++++++ .../lib/migration-services-yaml.ts | 7 +++--- .../lib/opensearch-domain-stack.ts | 2 +- .../service-stacks/migration-console-stack.ts | 1 + .../reindex-from-snapshot-stack.ts | 2 ++ .../lib/stack-composer.ts | 1 + .../opensearch-service-migration/options.md | 2 +- 10 files changed, 44 insertions(+), 6 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/README.md b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/README.md index 4e7c0be3d..66159b817 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/README.md +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/README.md @@ -92,6 +92,7 @@ Source and target clusters have the following options: - `endpoint`: required, the endpoint to reach the cluster. - `allow_insecure`: optional, default is false, equivalent to the curl `--insecure` flag, will not verify unsigned or invalid certificates +- `version`: optional, default is to assume a version compatible with ES 7 or OS 1. Format should be `ES_7.10.2` or `OS_2.15`, for instance. Exactly one of the following blocks must be present: @@ -198,6 +199,7 @@ The metadata migration moves indices, components, and templates from a snapshot - `index_allowlist`: optional, a list of index names. If this key is provided, only the named indices will be migrated. If the field is not provided, all non-system indices will be migrated. - `index_template_allowlist`: optional, a list of index template names. If this key is provided, only the named templates will be migrated. If the field is not provided, all templates will be migrated. - `component_template_allowlist`: optional, a list of component template names. If this key is provided, only the named component templates will be migrated.
If the field is not provided, all component templates will be migrated. +- `source_cluster_version`: optional, defaults to `ES_7.10.2`, which should work for closely related versions. The version of the source cluster from which the snapshot was taken; it is used to handle settings that are incompatible between versions. - `from_snapshot`: required. As mentioned above, `from_snapshot` is the only allowable source for a metadata migration at this point. This key must be present, but if its value is null/empty, the snapshot details will be pulled from the top-level `snapshot` object. If a `snapshot` object does not exist, this block must be populated. - `snapshot_name`: required, as described in the Snapshot section - `s3` or `fs` block: exactly one must be present, as described in the Snapshot section diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py index 77a8be621..c56496530 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/cluster.py @@ -58,6 +58,7 @@ "schema": { "endpoint": {"type": "string", "required": True}, "allow_insecure": {"type": "boolean", "required": False}, + "version": {"type": "string", "required": False}, "no_auth": NO_AUTH_SCHEMA, "basic_auth": BASIC_AUTH_SCHEMA, "sigv4": SIGV4_SCHEMA @@ -73,6 +74,7 @@ class Cluster: """ endpoint: str = "" + version: Optional[str] = None aws_secret_arn: Optional[str] = None auth_type: Optional[AuthMethod] = None auth_details: Optional[Dict[str, Any]] = None diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py index 526f029d7..ddf5617f3 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py @@ -52,7 +52,8 @@ "min_replicas": {"type": "integer", "min": 0, "required": False}, "index_allowlist": list_schema(required=False), "index_template_allowlist": list_schema(required=False), - "component_template_allowlist": list_schema(required=False) + "component_template_allowlist": list_schema(required=False), + "source_cluster_version": {"type": "string", "required": False} } @@ -80,6 +81,7 @@ def __init__(self, config, target_cluster: Cluster, snapshot: Optional[Snapshot] self._index_template_allowlist = config.get("index_template_allowlist", None) self._component_template_allowlist = config.get("component_template_allowlist", None) self._otel_endpoint = config.get("otel_endpoint", None) + self._source_cluster_version = config.get("source_cluster_version", None) logger.debug(f"Min replicas: {self._min_replicas}") logger.debug(f"Index allowlist: {self._index_allowlist}") @@ -226,6 +228,9 @@ def migrateOrEvaluate(self, command: str, extra_args=None) -> CommandResult: if self._component_template_allowlist: command_args.update({"--component-template-allowlist": ",".join(self._component_template_allowlist)}) + if self._source_cluster_version: + command_args.update({"--source-version": self._source_cluster_version}) + # Extra args might not
be represented with dictionary, so convert args to list and append commands self._appendArgs(command_args, extra_args) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metadata.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metadata.py index 10b178302..23e797610 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metadata.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metadata.py @@ -141,6 +141,30 @@ def test_full_config_and_snapshot_gives_priority_to_config(s3_snapshot): assert metadata._local_dir == config["from_snapshot"]["local_dir"] +def test_full_config_with_version_includes_version_string_in_subprocess(s3_snapshot, mocker): + config = { + "from_snapshot": { + "local_dir": "/tmp/s3", + "snapshot_name": "reindex_from_snapshot", + "s3": { + "repo_uri": "s3://my-bucket", + "aws_region": "us-east-1" + }, + }, + "source_cluster_version": "ES_6.8" + + } + metadata = Metadata(config, create_valid_cluster(), s3_snapshot) + + mock = mocker.patch("subprocess.run") + metadata.migrate() + + mock.assert_called_once() + actual_call_args = mock.call_args.args[0] + assert '--source-version' in actual_call_args + assert config['source_cluster_version'] in actual_call_args + + def test_metadata_with_s3_snapshot_makes_correct_subprocess_call(mocker): config = { "from_snapshot": { diff --git a/deployment/cdk/opensearch-service-migration/lib/migration-services-yaml.ts b/deployment/cdk/opensearch-service-migration/lib/migration-services-yaml.ts index c456d494b..5b0dcc23c 100644 --- a/deployment/cdk/opensearch-service-migration/lib/migration-services-yaml.ts +++ b/deployment/cdk/opensearch-service-migration/lib/migration-services-yaml.ts @@ -1,13 +1,12 @@ -import { EngineVersion } from 'aws-cdk-lib/aws-opensearchservice'; import { ClusterAuth } from './common-utilities'; import * as yaml from 'yaml'; export class ClusterYaml { endpoint: string = ''; - version?: EngineVersion; + version?: string; auth: ClusterAuth; - constructor({endpoint, auth, version} : {endpoint: string, auth: ClusterAuth, version?: EngineVersion}) { + constructor({endpoint, auth, version} : {endpoint: string, auth: ClusterAuth, version?: string}) { this.endpoint = endpoint; this.auth = auth; this.version = version; @@ -15,6 +14,7 @@ export class ClusterYaml { toDict() { return { endpoint: this.endpoint, + version: this.version, ...this.auth.toDict(), // TODO: figure out how version should be incorporated // https://opensearch.atlassian.net/browse/MIGRATIONS-1951 @@ -108,6 +108,7 @@ export class MetadataMigrationYaml { from_snapshot: null = null; min_replicas: number = 1; otel_endpoint: string = ''; + source_cluster_version?: string; } export class MSKYaml { diff --git a/deployment/cdk/opensearch-service-migration/lib/opensearch-domain-stack.ts b/deployment/cdk/opensearch-service-migration/lib/opensearch-domain-stack.ts index eab4807df..84f9cbb8d 100644 --- a/deployment/cdk/opensearch-service-migration/lib/opensearch-domain-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/opensearch-domain-stack.ts @@ -123,7 +123,7 @@ export class OpenSearchDomainStack extends Stack { } else { clusterAuth.noAuth = new ClusterNoAuth(); } - this.targetClusterYaml = new ClusterYaml({endpoint: `https://${domain.domainEndpoint}:443`, auth: clusterAuth, version}) + this.targetClusterYaml = new ClusterYaml({endpoint: 
`https://${domain.domainEndpoint}:443`, auth: clusterAuth, version: version.toString()}) } diff --git a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-console-stack.ts b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-console-stack.ts index c27c078e0..96135d343 100644 --- a/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-console-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/service-stacks/migration-console-stack.ts @@ -242,6 +242,7 @@ export class MigrationConsoleStack extends MigrationServiceCore { let servicesYaml = props.servicesYaml servicesYaml.source_cluster = props.sourceCluster servicesYaml.metadata_migration = new MetadataMigrationYaml(); + servicesYaml.metadata_migration.source_cluster_version = props.sourceCluster?.version if (props.otelCollectorEnabled) { const otelSidecarEndpoint = OtelCollectorSidecar.getOtelLocalhostEndpoint(); if (servicesYaml.metadata_migration) { diff --git a/deployment/cdk/opensearch-service-migration/lib/service-stacks/reindex-from-snapshot-stack.ts b/deployment/cdk/opensearch-service-migration/lib/service-stacks/reindex-from-snapshot-stack.ts index 17a6502f9..b42f3dde5 100644 --- a/deployment/cdk/opensearch-service-migration/lib/service-stacks/reindex-from-snapshot-stack.ts +++ b/deployment/cdk/opensearch-service-migration/lib/service-stacks/reindex-from-snapshot-stack.ts @@ -25,6 +25,7 @@ export interface ReindexFromSnapshotProps extends StackPropsExt { readonly extraArgs?: string, readonly otelCollectorEnabled: boolean, readonly clusterAuthDetails: ClusterAuth + readonly sourceClusterVersion?: string } export class ReindexFromSnapshotStack extends MigrationServiceCore { @@ -71,6 +72,7 @@ export class ReindexFromSnapshotStack extends MigrationServiceCore { let rfsCommand = `/rfs-app/runJavaWithClasspath.sh com.rfs.RfsMigrateDocuments --s3-local-dir /tmp/s3_files --s3-repo-uri ${s3Uri} --s3-region ${this.region} --snapshot-name rfs-snapshot --lucene-dir '/lucene' --target-host ${osClusterEndpoint}` rfsCommand = props.clusterAuthDetails.sigv4 ? rfsCommand.concat(`--target-aws-service-signing-name ${props.clusterAuthDetails.sigv4.serviceSigningName} --target-aws-region ${props.clusterAuthDetails.sigv4.region}`) : rfsCommand rfsCommand = props.otelCollectorEnabled ? rfsCommand.concat(` --otel-collector-endpoint ${OtelCollectorSidecar.getOtelLocalhostEndpoint()}`) : rfsCommand + rfsCommand = props.sourceClusterVersion ? 
rfsCommand.concat(` --source-version ${props.sourceClusterVersion}`) : rfsCommand rfsCommand = parseAndMergeArgs(rfsCommand, props.extraArgs); let targetUser = ""; diff --git a/deployment/cdk/opensearch-service-migration/lib/stack-composer.ts b/deployment/cdk/opensearch-service-migration/lib/stack-composer.ts index 04d5b54d0..bb827a59e 100644 --- a/deployment/cdk/opensearch-service-migration/lib/stack-composer.ts +++ b/deployment/cdk/opensearch-service-migration/lib/stack-composer.ts @@ -475,6 +475,7 @@ export class StackComposer { vpc: networkStack.vpc, extraArgs: reindexFromSnapshotExtraArgs, clusterAuthDetails: servicesYaml.target_cluster?.auth, + sourceClusterVersion: sourceCluster?.version, stackName: `OSMigrations-${stage}-${region}-ReindexFromSnapshot`, description: "This stack contains resources to assist migrating historical data, via Reindex from Snapshot, to a target cluster", stage: stage, diff --git a/deployment/cdk/opensearch-service-migration/options.md b/deployment/cdk/opensearch-service-migration/options.md index f66f43875..17b972311 100644 --- a/deployment/cdk/opensearch-service-migration/options.md +++ b/deployment/cdk/opensearch-service-migration/options.md @@ -51,7 +51,7 @@ In all other cases, the required components of each cluster object are: The optional component is: -- `version` -- the Elasticsearch or OpenSearch version of the cluster, in the format of `OS_x.y` or `ES_x.y` +- `version` -- the Elasticsearch or OpenSearch version of the cluster, in the format of `OS_x.y` or `ES_x.y`. This will be passed to the ReindexFromSnapshot service, if enabled, and provided to the metadata migration on the Migration Console. It defaults to `ES_7.10.2`. ### Reindex from Snapshot (RFS) Service Options From 8336a8765eb145ad04ccc2ecd911347ef887ba3b Mon Sep 17 00:00:00 2001 From: Andre Kurait Date: Tue, 17 Sep 2024 15:30:31 -0500 Subject: [PATCH 26/38] Add customizable test log level for test and slowTest Signed-off-by: Andre Kurait --- .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 13 ++++++++++--- .../src/test/resources/log4j2.properties | 9 +++++++-- RFS/src/test/resources/log4j2.properties | 12 ++++++++---- .../src/test/resources/log4j2.properties | 10 +++++++++- .../src/test/resources/log4j2.properties | 10 +++++++++- .../src/test/resources/log4j2.properties | 11 +++++++++-- .../src/test/resources/log4j2.properties | 10 +++++++++- ...rafficToHttpTransactionAccumulatorTest.java | 2 ++ .../src/test/resources/log4j2.properties | 10 +++++++++- .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ build.gradle | 2 ++ .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 18 ++++++++++++++++++ .../src/test/resources/log4j2.properties | 10 +++++++++- 18 files changed, 209 insertions(+), 16 deletions(-) create mode 100644 CreateSnapshot/src/test/resources/log4j2.properties create mode 100644 TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJMESPathMessageTransformerProvider/src/test/resources/log4j2.properties create mode 100644 TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/resources/log4j2.properties create mode 100644
TrafficCapture/transformationPlugins/jsonMessageTransformers/openSearch23PlusTargetTransformerProvider/src/test/resources/log4j2.properties create mode 100644 awsUtilities/src/test/resources/log4j2.properties create mode 100644 coreUtilities/src/test/resources/log4j2.properties create mode 100644 dashboardsSanitizer/src/test/resources/log4j2.properties diff --git a/CreateSnapshot/src/test/resources/log4j2.properties b/CreateSnapshot/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/CreateSnapshot/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/DocumentsFromSnapshotMigration/src/test/resources/log4j2.properties b/DocumentsFromSnapshotMigration/src/test/resources/log4j2.properties index 1f9ecf437..a389da4cd 100644 --- a/DocumentsFromSnapshotMigration/src/test/resources/log4j2.properties +++ b/DocumentsFromSnapshotMigration/src/test/resources/log4j2.properties @@ -1,7 +1,8 @@ -status = ERROR +status = WARN property.logsDir = ${env:SHARED_LOGS_DIR_PATH:-./logs} property.failedLoggerFileNamePrefix = ${logsDir}/${hostName}/failedRequests/failedRequests +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} appenders = console, FailedRequests @@ -26,11 +27,17 @@ appender.console.type = Console appender.console.name = Console appender.console.target = SYSTEM_OUT appender.console.layout.type = PatternLayout -appender.console.layout.pattern = %d{HH:mm:ss.SSS} %threadName %-5p %c{1}:%L - %m%n +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n rootLogger.level = info rootLogger.appenderRef.console.ref = Console +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} + logger.wireLogger.name = org.apache.http.wire logger.wireLogger.level = OFF -logger.wireLogger.additivity = false \ No newline at end of file +logger.wireLogger.additivity = false diff --git a/MetadataMigration/src/test/resources/log4j2.properties b/MetadataMigration/src/test/resources/log4j2.properties index 61c99871f..7171a6322 100644 --- a/MetadataMigration/src/test/resources/log4j2.properties +++ b/MetadataMigration/src/test/resources/log4j2.properties @@ -8,11 +8,16 @@ appender.console.target = SYSTEM_OUT appender.console.layout.type = PatternLayout appender.console.layout.pattern = %m%n +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-DEBUG} + rootLogger.level = info rootLogger.appenderRef.console.ref = Console +# Allow customization of owned package logs logger.rfs.name = com.rfs -logger.rfs.level = debug +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} logger.migrations.name = com.opensearch.migrations logger.migrations.level = debug @@ -32,4 
+37,4 @@ logger.dockerclientdeps.level = info logger.wireLogger.name = org.apache.http.wire logger.wireLogger.level = OFF -logger.wireLogger.additivity = false \ No newline at end of file +logger.wireLogger.additivity = false diff --git a/RFS/src/test/resources/log4j2.properties b/RFS/src/test/resources/log4j2.properties index b9b9653ce..ea5f68cc2 100644 --- a/RFS/src/test/resources/log4j2.properties +++ b/RFS/src/test/resources/log4j2.properties @@ -1,6 +1,8 @@ # Set the status level for the configuration status = DEBUG +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-DEBUG} + # Define the root logger rootLogger.level = info rootLogger.appenderRef.console.ref = Console @@ -9,11 +11,13 @@ rootLogger.appenderRef.console.ref = Console appender.console.type = Console appender.console.name = Console appender.console.layout.type = PatternLayout -appender.console.layout.pattern = %d{HH:mm:ss.SSS} %threadName %-5p %c{1}:%L - %m%n +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n -# Logger definitions +# Allow customization of owned package logs logger.rfs.name = com.rfs -logger.rfs.level = debug +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} logger.wire.name = org.apache.hc.client5.http logger.wire.level = info @@ -33,4 +37,4 @@ logger.WorkCoordinator.appenderRef.stdout.ref = WorkCoordinator appender.WorkCoordinator.type = Console appender.WorkCoordinator.name = WorkCoordinator appender.WorkCoordinator.layout.type = PatternLayout -appender.WorkCoordinator.layout.pattern = %d{HH:mm:ss.SSS} [%t] %-5level %logger{36} [worker=%X{workerId}]- %msg%n \ No newline at end of file +appender.WorkCoordinator.layout.pattern = %d{HH:mm:ss.SSS} [%t] %-5level %logger{36} [worker=%X{workerId}]- %msg%n diff --git a/TrafficCapture/captureKafkaOffloader/src/test/resources/log4j2.properties b/TrafficCapture/captureKafkaOffloader/src/test/resources/log4j2.properties index b698220a3..5e063eb26 100644 --- a/TrafficCapture/captureKafkaOffloader/src/test/resources/log4j2.properties +++ b/TrafficCapture/captureKafkaOffloader/src/test/resources/log4j2.properties @@ -1,4 +1,6 @@ -status = error +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-DEBUG} # Root logger options rootLogger.level = debug @@ -9,3 +11,9 @@ appender.console.type = Console appender.console.name = Console appender.console.layout.type = PatternLayout appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%equals{ ctx=%mdc}{ ctx=\{\}}{}%n + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/captureOffloader/src/test/resources/log4j2.properties b/TrafficCapture/captureOffloader/src/test/resources/log4j2.properties index b698220a3..5e063eb26 100644 --- a/TrafficCapture/captureOffloader/src/test/resources/log4j2.properties +++ b/TrafficCapture/captureOffloader/src/test/resources/log4j2.properties @@ -1,4 +1,6 @@ -status = error +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-DEBUG} # Root logger options rootLogger.level = debug @@ -9,3 +11,9 @@ appender.console.type = Console appender.console.name = Console appender.console.layout.type = PatternLayout appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%equals{ 
ctx=%mdc}{ ctx=\{\}}{}%n + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/nettyWireLogging/src/test/resources/log4j2.properties b/TrafficCapture/nettyWireLogging/src/test/resources/log4j2.properties index 0636de958..79f28d0bb 100644 --- a/TrafficCapture/nettyWireLogging/src/test/resources/log4j2.properties +++ b/TrafficCapture/nettyWireLogging/src/test/resources/log4j2.properties @@ -1,5 +1,8 @@ -status = error +status = WARN +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-DEBUG} + +# Root logger options rootLogger.level = debug appender.console.type = Console @@ -8,4 +11,8 @@ appender.console.target = SYSTEM_ERR appender.console.layout.type = PatternLayout appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} [%t] %c{1} - %msg%equals{ ctx=%mdc}{ ctx=\{\}}{}%n -rootLogger.appenderRef.stderr.ref = STDERR +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties b/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties index 59caa9889..da3da9bb4 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/resources/log4j2.properties @@ -1,4 +1,6 @@ -status = error +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} appender.console.type = Console appender.console.name = STDERR @@ -9,3 +11,9 @@ appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} [% rootLogger.level = info rootLogger.appenderRefs = stderr rootLogger.appenderRef.stderr.ref = STDERR + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java index ec3a94c8f..92be443c7 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest.java @@ -10,6 +10,7 @@ import java.util.stream.IntStream; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; @@ -23,6 +24,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j +@Tag("longTest") public class ExhaustiveCapturedTrafficToHttpTransactionAccumulatorTest extends InstrumentationTest { public static Arguments[] generateAllTestsAndConfirmComplete(IntStream seedStream) { diff --git a/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties 
b/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties index c97fd4ccb..7136cf5aa 100644 --- a/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties +++ b/TrafficCapture/trafficReplayer/src/test/resources/log4j2.properties @@ -1,4 +1,6 @@ -status = error +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} # Root logger options rootLogger.level = info @@ -14,3 +16,9 @@ appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t # of the logs for tests logger.OutputTupleJsonLogger.name = OutputTupleJsonLogger logger.OutputTupleJsonLogger.level = OFF + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJMESPathMessageTransformerProvider/src/test/resources/log4j2.properties b/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJMESPathMessageTransformerProvider/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJMESPathMessageTransformerProvider/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/resources/log4j2.properties b/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/TrafficCapture/transformationPlugins/jsonMessageTransformers/jsonJoltMessageTransformerProvider/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/TrafficCapture/transformationPlugins/jsonMessageTransformers/openSearch23PlusTargetTransformerProvider/src/test/resources/log4j2.properties b/TrafficCapture/transformationPlugins/jsonMessageTransformers/openSearch23PlusTargetTransformerProvider/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ 
b/TrafficCapture/transformationPlugins/jsonMessageTransformers/openSearch23PlusTargetTransformerProvider/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/awsUtilities/src/test/resources/log4j2.properties b/awsUtilities/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/awsUtilities/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/build.gradle b/build.gradle index 03b5e0b8b..1e0efd760 100644 --- a/build.gradle +++ b/build.gradle @@ -89,12 +89,14 @@ subprojects { // Mutually exclusive tests to avoid duplication tasks.named('test') { + systemProperty 'migrationLogLevel', 'TRACE' useJUnitPlatform { excludeTags('longTest', 'isolatedTest') } } tasks.register('slowTest', Test) { + systemProperty 'migrationLogLevel', 'DEBUG' useJUnitPlatform { includeTags 'longTest' excludeTags 'isolatedTest' diff --git a/coreUtilities/src/test/resources/log4j2.properties b/coreUtilities/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/coreUtilities/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/dashboardsSanitizer/src/test/resources/log4j2.properties b/dashboardsSanitizer/src/test/resources/log4j2.properties new file mode 100644 index 000000000..4f87b2f62 --- /dev/null +++ b/dashboardsSanitizer/src/test/resources/log4j2.properties @@ -0,0 +1,18 @@ +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-INFO} + +appender.console.type = Console +appender.console.name = Console +appender.console.target = SYSTEM_OUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p 
%c{1.} [%t] %m%n + +rootLogger.level = info +rootLogger.appenderRef.console.ref = Console + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} diff --git a/transformation/src/test/resources/log4j2.properties b/transformation/src/test/resources/log4j2.properties index b698220a3..25b4245ff 100644 --- a/transformation/src/test/resources/log4j2.properties +++ b/transformation/src/test/resources/log4j2.properties @@ -1,4 +1,6 @@ -status = error +status = WARN + +property.ownedPackagesLogLevel=${sys:migrationLogLevel:-debug} # Root logger options rootLogger.level = debug @@ -9,3 +11,9 @@ appender.console.type = Console appender.console.name = Console appender.console.layout.type = PatternLayout appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss,SSS}{UTC} %p %c{1.} [%t] %m%equals{ ctx=%mdc}{ ctx=\{\}}{}%n + +# Allow customization of owned package logs +logger.rfs.name = com.rfs +logger.rfs.level = ${ownedPackagesLogLevel} +logger.migration.name = org.opensearch.migrations +logger.migration.level = ${ownedPackagesLogLevel} From 1ecbb08e57c857b30b785828ab998666e6e454db Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 17:37:48 -0400 Subject: [PATCH 27/38] PR Feedback to clean up some ByteBuf hygiene in the HeaderAdderHandler. Signed-off-by: Greg Schohn --- .../trafficcapture/proxyserver/netty/HeaderAdderHandler.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java index e4bf3d528..1eefb08a1 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/netty/HeaderAdderHandler.java @@ -35,9 +35,10 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception useCarriageReturn = true; } else if (nextByte == '\n') { final var upToIndex = buf.readerIndex(); - var composite = Unpooled.compositeBuffer(4); + var composite = ctx.alloc().compositeBuffer(4); buf.resetReaderIndex(); - composite.addComponent(true, buf.retainedSlice(0, upToIndex)); + final var startingIndex = buf.readerIndex(); + composite.addComponent(true, buf.retainedSlice(startingIndex, upToIndex-startingIndex)); composite.addComponent(true, headerLineToAdd.retainedDuplicate()); composite.addComponent(true, (useCarriageReturn ? 
CRLF_BYTE_BUF : LF_BYTE_BUF).duplicate()); composite.addComponent(true, buf.retainedSlice(upToIndex, buf.readableBytes()-upToIndex)); From 2b90cdce059be8a0bb2ee47c5f73d337ddc32911 Mon Sep 17 00:00:00 2001 From: Andre Kurait Date: Tue, 17 Sep 2024 20:37:32 -0500 Subject: [PATCH 28/38] Fixed SonarQube bugs in coreUtilities Signed-off-by: Andre Kurait --- .../migrations/tracing/CommonScopedMetricInstruments.java | 2 +- .../org/opensearch/migrations/tracing/RootOtelContext.java | 1 - .../java/org/opensearch/migrations/utils/ProcessHelpers.java | 4 +++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java b/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java index 0d7d45b69..504e534a0 100644 --- a/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java +++ b/coreUtilities/src/main/java/org/opensearch/migrations/tracing/CommonScopedMetricInstruments.java @@ -70,7 +70,7 @@ public CommonScopedMetricInstruments(Meter meter, ScopeLabels stockMetricLabels, } private static List getBuckets(double firstBucketSize, double lastBucketCeiling) { - var buckets = getExponentialBucketsBetween(firstBucketSize, lastBucketCeiling, 2.0); + var buckets = getExponentialBucketsBetween(firstBucketSize, lastBucketCeiling); log.atTrace() .setMessage( () -> "Setting buckets to " diff --git a/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java b/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java index 3e869cef3..3aafc7876 100644 --- a/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java +++ b/coreUtilities/src/main/java/org/opensearch/migrations/tracing/RootOtelContext.java @@ -177,7 +177,6 @@ private static Span buildSpanWithParent(SpanBuilder builder, Span parentSpan, St String spanName, Stream linkedSpans ) { - assert forScope.getCurrentSpan() == null; var forEnclosingScope = forScope.getEnclosingScope(); var parentSpan = forEnclosingScope == null ? null : forEnclosingScope.getCurrentSpan(); var spanBuilder = getOpenTelemetry().getTracer(scopeName).spanBuilder(spanName); diff --git a/coreUtilities/src/main/java/org/opensearch/migrations/utils/ProcessHelpers.java b/coreUtilities/src/main/java/org/opensearch/migrations/utils/ProcessHelpers.java index d2d2dce02..6e4957755 100644 --- a/coreUtilities/src/main/java/org/opensearch/migrations/utils/ProcessHelpers.java +++ b/coreUtilities/src/main/java/org/opensearch/migrations/utils/ProcessHelpers.java @@ -7,7 +7,9 @@ @Slf4j public class ProcessHelpers { - private static final String DEFAULT_NODE_ID = "generated_" + UUID.randomUUID().toString(); + private static final String DEFAULT_NODE_ID = "generated_" + UUID.randomUUID(); + + private ProcessHelpers() {} public static String getNodeInstanceName() { var nodeId = Optional.of("ECS_TASK_ID").map(System::getenv) From 240879202980261c78361314549e066820049aff Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Tue, 17 Sep 2024 23:41:38 -0400 Subject: [PATCH 29/38] MIGRATIONS-2014 - parse the config file w/ jackson instead of doing it poorly manually. 
Redactions are still in place, but structures (like lists) should be preserved, rather than dropped. Signed-off-by: Greg Schohn --- .../elasticsearchWithSearchGuard/Dockerfile | 3 +- .../trafficCaptureProxyServer/build.gradle | 8 +++-- .../proxyserver/CaptureProxy.java | 31 +++++++++++-------- 3 files changed, 25 insertions(+), 17 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/elasticsearchWithSearchGuard/Dockerfile b/TrafficCapture/dockerSolution/src/main/docker/elasticsearchWithSearchGuard/Dockerfile index 2e77e4eef..9ab4c977c 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/elasticsearchWithSearchGuard/Dockerfile +++ b/TrafficCapture/dockerSolution/src/main/docker/elasticsearchWithSearchGuard/Dockerfile @@ -19,7 +19,8 @@ RUN sed 's/searchguard/plugins.security/g' $ELASTIC_SEARCH_CONFIG_FILE | \ # but maybe not for a demo to show individual steps RUN /root/enableTlsConfig.sh $ELASTIC_SEARCH_CONFIG_FILE # Alter this config line to either enable(searchguard.disabled: false) or disable(searchguard.disabled: true) HTTP auth -RUN echo "searchguard.disabled: false" >> $ELASTIC_SEARCH_CONFIG_FILE +RUN echo -n "searchguard.disabled: false" >> $ELASTIC_SEARCH_CONFIG_FILE && \ + echo -n "plugins.security.ssl.http.enabled_protocols: ['TLSv1.2', 'TLSv1.3']" >> $PROXY_TLS_CONFIG_FILE RUN sed -i '/^-Xms/i # Increase default heap to 80% RAM, Requires JDK >= 10' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \ sed -i 's/^-Xms/#&/' $ELASTIC_SEARCH_JVM_CONFIG_FILE && \ diff --git a/TrafficCapture/trafficCaptureProxyServer/build.gradle b/TrafficCapture/trafficCaptureProxyServer/build.gradle index 1fb3f7e1a..bb3430705 100644 --- a/TrafficCapture/trafficCaptureProxyServer/build.gradle +++ b/TrafficCapture/trafficCaptureProxyServer/build.gradle @@ -32,15 +32,17 @@ dependencies { implementation project(':TrafficCapture:captureKafkaOffloader') implementation project(':coreUtilities') + implementation group: "com.google.protobuf", name: "protobuf-java" + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core' + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind' + implementation group: 'com.lmax', name: 'disruptor' implementation group: 'io.netty', name: 'netty-all' implementation group: 'org.apache.logging.log4j', name: 'log4j-api' implementation group: 'org.apache.logging.log4j', name: 'log4j-core' implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl' + implementation group: 'org.jcommander', name: 'jcommander' implementation group: 'org.slf4j', name: 'slf4j-api' - implementation group: 'com.lmax', name: 'disruptor' - implementation group: 'org.jcommander', name: 'jcommander' - implementation group: "com.google.protobuf", name: "protobuf-java" testImplementation project(':TrafficCapture:captureProtobufs') testImplementation testFixtures(project(path: ':testHelperFixtures')) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index bbb350147..930d68c6f 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -1,10 +1,10 @@ package org.opensearch.migrations.trafficcapture.proxyserver; +import java.io.File;
import java.io.FileReader; import java.io.IOException; import java.io.OutputStream; import java.net.URI; -import java.nio.file.Files; import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; @@ -16,11 +16,15 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; +import java.util.stream.Collectors; import java.util.stream.Stream; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; import com.google.protobuf.CodedOutputStream; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -143,19 +147,20 @@ static Parameters parseArgs(String[] args) { @SneakyThrows protected static Settings getSettings(@NonNull String configFile) { - var builder = Settings.builder(); - try (var lines = Files.lines(Paths.get(configFile))) { - lines.map( - line -> Optional.of(line.indexOf('#')).filter(i -> i >= 0).map(i -> line.substring(0, i)).orElse(line) - ).filter(line -> line.startsWith(HTTPS_CONFIG_PREFIX) && line.contains(":")).forEach(line -> { - var parts = line.split(": *", 2); - builder.put(parts[0], parts[1]); - }); - } - builder.put(SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED, false); + var objectMapper = new ObjectMapper(new YAMLFactory()); + var configMap = objectMapper.readValue(new File(configFile), Map.class); + var configParentDirStr = Paths.get(configFile).toAbsolutePath().getParent(); - builder.put("path.home", configParentDirStr); - return builder.build(); + var httpsSettings = + objectMapper.convertValue(configMap, new TypeReference<Map<String, Object>>(){}) + .entrySet().stream() + .filter(kvp -> kvp.getKey().startsWith(HTTPS_CONFIG_PREFIX)) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return Settings.builder().loadFromMap(httpsSettings) + .put(SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED, false) + .put("path.home", configParentDirStr) + .build(); } protected static IConnectionCaptureFactory getNullConnectionCaptureFactory() { From a5c4b7900b7e64b17b724fb7dc05456ca29d94e7 Mon Sep 17 00:00:00 2001 From: Mikayla Thompson Date: Wed, 18 Sep 2024 11:20:02 -0600 Subject: [PATCH 30/38] Improve migration console cli.py test coverage (#968) * Improve migration console cli.py test coverage Signed-off-by: Mikayla Thompson --- .../lib/console_link/console_link/cli.py | 11 +- .../console_link/console_link/environment.py | 2 +- .../console_link/middleware/kafka.py | 2 +- .../lib/console_link/tests/test_cli.py | 215 +++++++++++++++++- 4 files changed, 218 insertions(+), 12 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py index f2fb0bf5e..c7f5b0d01 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/cli.py @@ -7,6 +7,7 @@ import console_link.middleware.snapshot as snapshot_ import console_link.middleware.metadata as metadata_ import console_link.middleware.replay as replay_ +import console_link.middleware.kafka as kafka_ from console_link.models.utils import ExitCode from
console_link.environment import Environment @@ -397,7 +398,7 @@ def kafka_group(ctx): @click.option('--topic-name', default="logging-traffic-topic", help='Specify a topic name to create') @click.pass_obj def create_topic_cmd(ctx, topic_name): - result = ctx.env.kafka.create_topic(topic_name=topic_name) + result = kafka_.create_topic(ctx.env.kafka, topic_name=topic_name) click.echo(result.value) @@ -408,13 +409,13 @@ def create_topic_cmd(ctx, topic_name): @click.pass_obj def delete_topic_cmd(ctx, acknowledge_risk, topic_name): if acknowledge_risk: - result = ctx.env.kafka.delete_topic(topic_name=topic_name) + result = kafka_.delete_topic(ctx.env.kafka, topic_name=topic_name) click.echo(result.value) else: if click.confirm('Deleting a topic will irreversibly delete all captured traffic records stored in that ' 'topic. Are you sure you want to continue?'): click.echo(f"Performing delete topic operation on {topic_name} topic...") - result = ctx.env.kafka.delete_topic(topic_name=topic_name) + result = kafka_.delete_topic(ctx.env.kafka, topic_name=topic_name) click.echo(result.value) else: click.echo("Aborting command.") @@ -424,7 +425,7 @@ def delete_topic_cmd(ctx, acknowledge_risk, topic_name): @click.option('--group-name', default="logging-group-default", help='Specify a group name to describe') @click.pass_obj def describe_group_command(ctx, group_name): - result = ctx.env.kafka.describe_consumer_group(group_name=group_name) + result = kafka_.describe_consumer_group(ctx.env.kafka, group_name=group_name) click.echo(result.value) @@ -432,7 +433,7 @@ def describe_group_command(ctx, group_name): @click.option('--topic-name', default="logging-traffic-topic", help='Specify a topic name to describe') @click.pass_obj def describe_topic_records_cmd(ctx, topic_name): - result = ctx.env.kafka.describe_topic_records(topic_name=topic_name) + result = kafka_.describe_topic_records(ctx.env.kafka, topic_name=topic_name) click.echo(result.value) # ##################### UTILITIES ################### diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py index 6b3ac93d2..8f2950c39 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/environment.py @@ -62,7 +62,7 @@ def __init__(self, config_file: str): self.target_cluster: Cluster = Cluster(self.config["target_cluster"]) logger.info(f"Target cluster initialized: {self.target_cluster.endpoint}") else: - logger.warn("No target cluster provided. This may prevent other actions from proceeding.") + logger.warning("No target cluster provided. 
This may prevent other actions from proceeding.") if 'metrics_source' in self.config: self.metrics_source: MetricsSource = get_metrics_source( diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/middleware/kafka.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/middleware/kafka.py index 7d5c9755a..2edabcba1 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/middleware/kafka.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/middleware/kafka.py @@ -21,5 +21,5 @@ def describe_consumer_group(kafka: Kafka, group_name: str) -> CommandResult: def describe_topic_records(kafka: Kafka, topic_name: str) -> CommandResult: - result = kafka.delete_topic(topic_name=topic_name) + result = kafka.describe_topic_records(topic_name=topic_name) return result diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py index 7cac86a63..6ace00371 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cli.py @@ -14,6 +14,7 @@ from console_link.models.command_result import CommandResult from console_link.models.ecs_service import ECSService, InstanceStatuses from console_link.models.kafka import StandardKafka +from console_link.models.metrics_source import Component from console_link.models.replayer_ecs import ECSReplayer TEST_DATA_DIRECTORY = pathlib.Path(__file__).parent / "data" @@ -42,6 +43,38 @@ def set_fake_aws_credentials(): os.environ['AWS_ACCESS_KEY_ID'] = 'AKIAIOSFODNN7EXAMPLE' os.environ['AWS_SECRET_ACCESS_KEY'] = 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY' + +@pytest.fixture +def source_cluster_only_yaml_path(tmp_path): + source_cluster_only_path = tmp_path / "source_cluster_only.yaml" + source_cluster_only_yaml = """ +source_cluster: + endpoint: "https://elasticsearch:9200" + allow_insecure: true + basic_auth: + username: "admin" + password: "admin" +""" + with open(source_cluster_only_path, 'w') as f: + f.write(source_cluster_only_yaml) + return source_cluster_only_path + + +@pytest.fixture +def target_cluster_only_yaml_path(tmp_path): + target_cluster_only_path = tmp_path / "target_cluster_only.yaml" + target_cluster_only_yaml = """ +target_cluster: + endpoint: "https://opensearchtarget:9200" + allow_insecure: true + basic_auth: + username: "admin" + password: "myStrongPassword123!" 
+""" + with open(target_cluster_only_path, 'w') as f: + f.write(target_cluster_only_yaml) + return target_cluster_only_path + # Tests around the general CLI functionality @@ -58,12 +91,36 @@ def test_cli_with_valid_services_file_does_not_raise_error(runner): assert result.exit_code == 0 +def test_cli_with_no_clusters_in_services_raises_error(runner, tmp_path): + no_cluster_services = """ +metrics_source: + prometheus: + endpoint: "http://prometheus:9090" +backfill: + reindex_from_snapshot: + docker: +snapshot: + snapshot_name: "test_snapshot" + fs: + repo_path: "/snapshot/test-console" + otel_endpoint: "http://otel-collector:4317" + """ + yaml_path = tmp_path / "services.yaml" + with open(yaml_path, 'w') as f: + f.write(no_cluster_services) + + result = runner.invoke(cli, ['--config-file', str(yaml_path), 'clusters', 'connection-check'], + catch_exceptions=True) + assert result.exit_code == 1 + assert isinstance(result.exception, SystemExit) + # The following tests are mostly smoke-tests with a goal of covering every CLI command and option. # They generally mock functions either at the logic or the model layer, though occasionally going all the way to # an external endpoint call. # Standardizing these in the future would be great, but the priority right now is getting overall coverage, and # testing that . + def test_cli_cluster_cat_indices(runner, mocker): middleware_mock = mocker.spy(middleware.clusters, 'cat_indices') api_mock = mocker.patch.object(Cluster, 'call_api') @@ -100,6 +157,52 @@ def test_cli_cluster_connection_check(runner, mocker): api_mock.assert_called() +def test_cli_cluster_cat_indices_and_connection_check_with_one_cluster(runner, mocker, + target_cluster_only_yaml_path, + source_cluster_only_yaml_path): + middleware_connection_check_mock = mocker.spy(middleware.clusters, 'connection_check') + middleware_cat_indices_mock = mocker.spy(middleware.clusters, 'cat_indices') + api_mock = mocker.patch.object(Cluster, 'call_api', autospec=True) + # Connection check with no target cluster + result = runner.invoke(cli, ['--config-file', str(source_cluster_only_yaml_path), 'clusters', 'connection-check'], + catch_exceptions=True) + assert result.exit_code == 0 + assert "SOURCE CLUSTER" in result.output + assert "No target cluster defined." in result.output + middleware_connection_check_mock.assert_called_once() + api_mock.assert_called_once() + middleware_connection_check_mock.reset_mock() + api_mock.reset_mock() + # Connection check with no source cluster + result = runner.invoke(cli, ['--config-file', str(target_cluster_only_yaml_path), 'clusters', 'connection-check'], + catch_exceptions=True) + assert result.exit_code == 0 + assert "TARGET CLUSTER" in result.output + assert "No source cluster defined." in result.output + middleware_connection_check_mock.assert_called_once() + api_mock.assert_called_once() + middleware_connection_check_mock.reset_mock() + api_mock.reset_mock() + # Cat indices with no target cluster + result = runner.invoke(cli, ['--config-file', str(source_cluster_only_yaml_path), 'clusters', 'cat-indices'], + catch_exceptions=True) + assert result.exit_code == 0 + assert "SOURCE CLUSTER" in result.output + assert "No target cluster defined." 
in result.output + middleware_cat_indices_mock.assert_called_once() + api_mock.assert_called_once() + middleware_cat_indices_mock.reset_mock() + api_mock.reset_mock() + # Cat indices with no source cluster + result = runner.invoke(cli, ['--config-file', str(target_cluster_only_yaml_path), 'clusters', 'cat-indices'], + catch_exceptions=True) + assert result.exit_code == 0 + assert "TARGET CLUSTER" in result.output + assert "No source cluster defined." in result.output + middleware_cat_indices_mock.assert_called_once() + api_mock.assert_called_once() + + def test_cli_cluster_run_test_benchmarks(runner, mocker): middleware_mock = mocker.spy(middleware.clusters, 'run_test_benchmarks') model_mock = mocker.patch.object(Cluster, 'execute_benchmark_workload') @@ -110,6 +213,16 @@ def test_cli_cluster_run_test_benchmarks(runner, mocker): assert result.exit_code == 0 +def test_cli_cluster_run_test_benchmarks_without_source_raises_error(runner, mocker, target_cluster_only_yaml_path): + middleware_mock = mocker.spy(middleware.clusters, 'run_test_benchmarks') + model_mock = mocker.patch.object(Cluster, 'execute_benchmark_workload') + result = runner.invoke(cli, ['--config-file', target_cluster_only_yaml_path, 'clusters', 'run-test-benchmarks'], + catch_exceptions=True) + middleware_mock.assert_not_called() + model_mock.assert_not_called() + assert result.exit_code == 2 + + def test_cli_cluster_clear_indices(runner, mocker): mock = mocker.patch('console_link.middleware.clusters.clear_indices') result = runner.invoke(cli, @@ -160,6 +273,13 @@ def test_cli_cat_indices_e2e(runner, env): assert target_cat_indices in result.output +def test_cli_snapshot_when_not_defined_raises_error(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'snapshot', 'create'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Snapshot is not set" in result.output + + def test_cli_snapshot_create(runner, mocker): mock = mocker.patch('console_link.middleware.snapshot.create') @@ -201,6 +321,13 @@ def test_cli_with_backfill_describe(runner, mocker): assert result.exit_code == 0 +def test_cli_backfill_when_not_defined(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'backfill', 'start'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Backfill migration is not set" in result.output + + def test_cli_backfill_create_rfs(runner, mocker): mock = mocker.patch.object(ECSRFSBackfill, 'create', autospec=True) result = runner.invoke(cli, ['--config-file', str(TEST_DATA_DIRECTORY / "services_with_ecs_rfs.yaml"), @@ -293,6 +420,13 @@ def test_get_backfill_status_with_deep_check(runner, mocker): mock_detailed_status_call.assert_called_once() +def test_cli_replay_when_not_defined(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'replay', 'describe'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Replay is not set" in result.output + + def test_replay_describe(runner, mocker): mock = mocker.patch('console_link.middleware.replay.describe') result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), 'replay', 'describe'], @@ -341,6 +475,13 @@ def test_replay_status(runner, mocker): assert result.exit_code == 0 +def test_cli_metadata_when_not_defined(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'metadata', 
'migrate'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Metadata is not set" in result.output + + def test_cli_metadata_migrate(runner, mocker): mock = mocker.patch("subprocess.run") result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), 'metadata', 'migrate'], @@ -357,7 +498,14 @@ def test_cli_metadata_evaluate(runner, mocker): assert result.exit_code == 0 -def test_cli_with_metrics_get_data(runner, mocker): +def test_cli_metrics_when_not_defined(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'metrics', 'list'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Metrics source is not set" in result.output + + +def test_cli_with_metrics_list_metrics(runner, mocker): mock = mocker.patch('console_link.models.metrics_source.PrometheusMetricsSource.get_metrics') result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), 'metrics', 'list'], catch_exceptions=True) @@ -365,41 +513,98 @@ def test_cli_with_metrics_get_data(runner, mocker): assert result.exit_code == 0 -def test_cli_kafka_create_topic(runner, mocker): - # These commands _should_ go through the middleware layer but currently don't - # middleware_mock = mocker.spy(middleware.kafka, 'create_topic') - # middleware_mock.assert_called_once_with(env.kafka, 'test') +def test_cli_with_metrics_list_metrics_as_json(runner, mocker): + mock = mocker.patch('console_link.models.metrics_source.PrometheusMetricsSource.get_metrics', + return_value={'captureProxy': ['kafkaCommitCount', 'captureConnectionDuration'], + 'replayer': ['kafkaCommitCount']}, autospec=True) + result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), '--json', 'metrics', 'list'], + catch_exceptions=True) + mock.assert_called_once() + assert result.exit_code == 0 + + +def test_cli_with_metrics_get_data(runner, mocker): + mock = mocker.patch('console_link.models.metrics_source.PrometheusMetricsSource.get_metric_data', + return_value=[('2024-05-22T20:06:00+00:00', 0.0), ('2024-05-22T20:07:00+00:00', 1.0), + ('2024-05-22T20:08:00+00:00', 2.0), ('2024-05-22T20:09:00+00:00', 3.0), + ('2024-05-22T20:10:00+00:00', 4.0)], + autospec=True) + result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), 'metrics', 'get-data', + 'replayer', 'kafkaCommitCount'], + catch_exceptions=True) + assert result.exit_code == 0 + mock.assert_called_once() + assert mock.call_args.args[1] == Component.REPLAYER + assert mock.call_args.args[2] == 'kafkaCommitCount' + + +def test_cli_with_metrics_get_data_as_json(runner, mocker): + mock = mocker.patch('console_link.models.metrics_source.PrometheusMetricsSource.get_metric_data', + return_value=[('2024-05-22T20:06:00+00:00', 0.0), ('2024-05-22T20:07:00+00:00', 1.0), + ('2024-05-22T20:08:00+00:00', 2.0), ('2024-05-22T20:09:00+00:00', 3.0), + ('2024-05-22T20:10:00+00:00', 4.0)], + autospec=True) + result = runner.invoke(cli, ['--config-file', str(VALID_SERVICES_YAML), '--json', 'metrics', 'get-data', + 'replayer', 'kafkaCommitCount'], + catch_exceptions=True) + assert result.exit_code == 0 + mock.assert_called_once() + assert mock.call_args.args[1] == Component.REPLAYER + assert mock.call_args.args[2] == 'kafkaCommitCount' + + +def test_cli_kafka_when_not_defined(runner, source_cluster_only_yaml_path): + result = runner.invoke(cli, ['--config-file', source_cluster_only_yaml_path, 'kafka', 'create-topic'], + catch_exceptions=True) + assert result.exit_code == 2 + assert "Kafka is not set" in 
result.output + +def test_cli_kafka_create_topic(runner, mocker): + middleware_mock = mocker.spy(middleware.kafka, 'create_topic') model_mock = mocker.patch.object(StandardKafka, 'create_topic') result = runner.invoke(cli, ['-vv', '--config-file', str(VALID_SERVICES_YAML), 'kafka', 'create-topic', '--topic-name', 'test'], catch_exceptions=True) + model_mock.assert_called_once_with(topic_name='test') + middleware_mock.assert_called_once() assert result.exit_code == 0 def test_cli_kafka_delete_topic(runner, mocker): model_mock = mocker.patch.object(StandardKafka, 'delete_topic') + middleware_mock = mocker.spy(middleware.kafka, 'delete_topic') result = runner.invoke(cli, ['-vv', '--config-file', str(VALID_SERVICES_YAML), 'kafka', 'delete-topic', '--topic-name', 'test', '--acknowledge-risk'], catch_exceptions=True) model_mock.assert_called_once_with(topic_name='test') + middleware_mock.assert_called_once() assert result.exit_code == 0 def test_cli_kafka_describe_consumer_group(runner, mocker): model_mock = mocker.patch.object(StandardKafka, 'describe_consumer_group') + middleware_mock = mocker.spy(middleware.kafka, 'describe_consumer_group') result = runner.invoke(cli, ['-vv', '--config-file', str(VALID_SERVICES_YAML), 'kafka', 'describe-consumer-group', '--group-name', 'test-group'], catch_exceptions=True) model_mock.assert_called_once_with(group_name='test-group') + middleware_mock.assert_called_once() assert result.exit_code == 0 def test_cli_kafka_describe_topic(runner, mocker): model_mock = mocker.patch.object(StandardKafka, 'describe_topic_records') + middleware_mock = mocker.spy(middleware.kafka, 'describe_topic_records') result = runner.invoke(cli, ['-vv', '--config-file', str(VALID_SERVICES_YAML), 'kafka', 'describe-topic-records', '--topic-name', 'test'], catch_exceptions=True) model_mock.assert_called_once_with(topic_name='test') + middleware_mock.assert_called_once() + assert result.exit_code == 0 + + +def test_completion_script(runner): + result = runner.invoke(cli, [str(VALID_SERVICES_YAML), 'completion', 'bash'], catch_exceptions=True) assert result.exit_code == 0 From 8160894448a85729a4ba847cae3b1f276588fb92 Mon Sep 17 00:00:00 2001 From: Peter Nied Date: Wed, 18 Sep 2024 13:26:54 -0500 Subject: [PATCH 31/38] Fix code smells in RfsMigrateDocuments.java (#965) - Remove usage of System.err - Create parameter object for method with 12 arguments - Remove useless continue - Use try-with-resources for closable objects - Removed TryHandlePhaseFailure Signed-off-by: Peter Nied --- .../src/main/java/com/rfs/CreateSnapshot.java | 68 +++---- .../java/com/rfs/RfsMigrateDocuments.java | 171 ++++++++++-------- .../rfs/ParallelDocumentMigrationsTest.java | 28 +-- .../java/com/rfs/ProcessLifecycleTest.java | 24 +-- .../src/test/java/com/rfs/SourceTestBase.java | 48 ++--- .../com/rfs/common/TryHandlePhaseFailure.java | 20 -- 6 files changed, 177 insertions(+), 182 deletions(-) delete mode 100644 RFS/src/main/java/com/rfs/common/TryHandlePhaseFailure.java diff --git a/CreateSnapshot/src/main/java/com/rfs/CreateSnapshot.java b/CreateSnapshot/src/main/java/com/rfs/CreateSnapshot.java index 85379c6f8..54f23c57d 100644 --- a/CreateSnapshot/src/main/java/com/rfs/CreateSnapshot.java +++ b/CreateSnapshot/src/main/java/com/rfs/CreateSnapshot.java @@ -1,7 +1,5 @@ package com.rfs; -import java.util.function.Function; - import org.opensearch.migrations.snapshot.creation.tracing.RootSnapshotContext; import org.opensearch.migrations.tracing.ActiveContextTracker; import 
org.opensearch.migrations.tracing.ActiveContextTrackerByActivityType; @@ -17,13 +15,14 @@ import com.rfs.common.OpenSearchClient; import com.rfs.common.S3SnapshotCreator; import com.rfs.common.SnapshotCreator; -import com.rfs.common.TryHandlePhaseFailure; import com.rfs.common.http.ConnectionContext; +import com.rfs.tracing.IRfsContexts.ICreateSnapshotContext; import com.rfs.worker.SnapshotRunner; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.extern.slf4j.Slf4j; +@AllArgsConstructor @Slf4j public class CreateSnapshot { public static class Args { @@ -100,39 +99,44 @@ public static void main(String[] args) throws Exception { } log.info("Running CreateSnapshot with {}", String.join(" ", args)); - run( - c -> ((arguments.fileSystemRepoPath != null) - ? new FileSystemSnapshotCreator( + var snapshotCreator = new CreateSnapshot(arguments, rootContext.createSnapshotCreateContext()); + snapshotCreator.run(); + } + + private Args arguments; + private ICreateSnapshotContext context; + + public void run() { + var client = new OpenSearchClient(arguments.sourceArgs.toConnectionContext()); + SnapshotCreator snapshotCreator; + if (arguments.fileSystemRepoPath != null) { + snapshotCreator = new FileSystemSnapshotCreator( arguments.snapshotName, - c, + client, arguments.fileSystemRepoPath, - rootContext.createSnapshotCreateContext() - ) - : new S3SnapshotCreator( - arguments.snapshotName, - c, - arguments.s3RepoUri, - arguments.s3Region, - arguments.maxSnapshotRateMBPerNode, - arguments.s3RoleArn, - rootContext.createSnapshotCreateContext() - )), - new OpenSearchClient(arguments.sourceArgs.toConnectionContext()), - arguments.noWait - ); - } + context + ); + } else { + snapshotCreator = new S3SnapshotCreator( + arguments.snapshotName, + client, + arguments.s3RepoUri, + arguments.s3Region, + arguments.maxSnapshotRateMBPerNode, + arguments.s3RoleArn, + context + ); + } - public static void run( - Function snapshotCreatorFactory, - OpenSearchClient openSearchClient, - boolean noWait - ) throws Exception { - TryHandlePhaseFailure.executeWithTryCatch(() -> { - if (noWait) { - SnapshotRunner.run(snapshotCreatorFactory.apply(openSearchClient)); + try { + if (arguments.noWait) { + SnapshotRunner.run(snapshotCreator); } else { - SnapshotRunner.runAndWaitForCompletion(snapshotCreatorFactory.apply(openSearchClient)); + SnapshotRunner.runAndWaitForCompletion(snapshotCreator); } - }); + } catch (Exception e) { + log.atError().setMessage("Unexpected error running RfsWorker").setCause(e).log(); + throw e; + } } } diff --git a/DocumentsFromSnapshotMigration/src/main/java/com/rfs/RfsMigrateDocuments.java b/DocumentsFromSnapshotMigration/src/main/java/com/rfs/RfsMigrateDocuments.java index 01a0582c7..a3d806ea0 100644 --- a/DocumentsFromSnapshotMigration/src/main/java/com/rfs/RfsMigrateDocuments.java +++ b/DocumentsFromSnapshotMigration/src/main/java/com/rfs/RfsMigrateDocuments.java @@ -6,11 +6,13 @@ import java.time.Clock; import java.time.Duration; import java.util.List; +import java.util.function.BiFunction; import java.util.function.Function; import org.opensearch.migrations.Version; import org.opensearch.migrations.VersionConverter; import org.opensearch.migrations.cluster.ClusterProviderRegistry; +import org.opensearch.migrations.cluster.ClusterSnapshotReader; import org.opensearch.migrations.reindexer.tracing.RootDocumentMigrationContext; import org.opensearch.migrations.tracing.ActiveContextTracker; import org.opensearch.migrations.tracing.ActiveContextTrackerByActivityType; @@ -23,6 +25,7 @@ 
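The refactor in this patch replaces a 12-argument run(...) signature with a RunParameters value object built via Lombok's @Builder. The same smell and the same cure exist in any language; a minimal Python analogue of the parameter-object pattern (the field names here are illustrative, not taken from this repository):

# Python analogue of the parameter-object refactor: bundle a long argument
# list into one typed object instead of a 12-parameter function.
from dataclasses import dataclass
from datetime import timedelta
from typing import List

@dataclass(frozen=True)
class RunParameters:
    snapshot_name: str
    index_allowlist: List[str]
    max_initial_lease_duration: timedelta
    max_shard_size_bytes: int = 80 * 1024 * 1024 * 1024

def run(params: RunParameters) -> None:
    # Callers name every field at the call site, so adding a parameter
    # no longer reshuffles a positional argument list.
    print(f"migrating {params.snapshot_name} with a {params.max_shard_size_bytes} byte cap")

run(RunParameters(snapshot_name="test_snapshot",
                  index_allowlist=[],
                  max_initial_lease_duration=timedelta(minutes=10)))
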
import com.beust.jcommander.Parameter; import com.beust.jcommander.ParameterException; import com.beust.jcommander.ParametersDelegate; +import com.rfs.RfsMigrateDocuments.RunParameters; import com.rfs.cms.CoordinateWorkHttpClient; import com.rfs.cms.IWorkCoordinator; import com.rfs.cms.LeaseExpireTrigger; @@ -37,12 +40,13 @@ import com.rfs.common.S3Uri; import com.rfs.common.SnapshotShardUnpacker; import com.rfs.common.SourceRepo; -import com.rfs.common.TryHandlePhaseFailure; import com.rfs.common.http.ConnectionContext; import com.rfs.models.IndexMetadata; import com.rfs.models.ShardMetadata; import com.rfs.worker.DocumentsRunner; import com.rfs.worker.ShardWorkPreparer; +import lombok.Builder; +import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.slf4j.MDC; @@ -167,7 +171,7 @@ public static void validateArgs(Args args) { } public static void main(String[] args) throws Exception { - System.err.println("Got args: " + String.join("; ", args)); + log.info("Got args: " + String.join("; ", args)); var workerId = ProcessHelpers.getNodeInstanceName(); log.info("Starting RfsMigrateDocuments with workerId =" + workerId); @@ -182,63 +186,63 @@ public static void main(String[] args) throws Exception { validateArgs(arguments); - var rootDocumentContext = makeRootContext(arguments, workerId); + var context = makeRootContext(arguments, workerId); var luceneDirPath = Paths.get(arguments.luceneDir); var snapshotLocalDirPath = arguments.snapshotLocalDir != null ? Paths.get(arguments.snapshotLocalDir) : null; + var connectionContext = arguments.targetArgs.toConnectionContext(); try (var processManager = new LeaseExpireTrigger(workItemId -> { log.error("Terminating RfsMigrateDocuments because the lease has expired for " + workItemId); System.exit(PROCESS_TIMED_OUT); - }, Clock.systemUTC())) { - ConnectionContext connectionContext = arguments.targetArgs.toConnectionContext(); + }, Clock.systemUTC()); var workCoordinator = new OpenSearchWorkCoordinator( new CoordinateWorkHttpClient(connectionContext), TOLERABLE_CLIENT_SERVER_CLOCK_DIFFERENCE_SECONDS, workerId - ); + )) { MDC.put(LOGGING_MDC_WORKER_ID, workerId); // I don't see a need to clean this up since we're in main - TryHandlePhaseFailure.executeWithTryCatch(() -> { - OpenSearchClient targetClient = new OpenSearchClient(connectionContext); - DocumentReindexer reindexer = new DocumentReindexer(targetClient, - arguments.numDocsPerBulkRequest, - arguments.numBytesPerBulkRequest, - arguments.maxConnections); - - SourceRepo sourceRepo; - if (snapshotLocalDirPath == null) { - sourceRepo = S3Repo.create( - Paths.get(arguments.s3LocalDir), - new S3Uri(arguments.s3RepoUri), - arguments.s3Region - ); - } else { - sourceRepo = new FileSystemRepo(snapshotLocalDirPath); - } - DefaultSourceRepoAccessor repoAccessor = new DefaultSourceRepoAccessor(sourceRepo); - - var sourceResourceProvider = ClusterProviderRegistry.getSnapshotReader(arguments.sourceVersion, sourceRepo); - - SnapshotShardUnpacker.Factory unpackerFactory = new SnapshotShardUnpacker.Factory( - repoAccessor, - luceneDirPath, - sourceResourceProvider.getBufferSizeInBytes() + OpenSearchClient targetClient = new OpenSearchClient(connectionContext); + DocumentReindexer reindexer = new DocumentReindexer(targetClient, + arguments.numDocsPerBulkRequest, + arguments.numBytesPerBulkRequest, + arguments.maxConnections); + + SourceRepo sourceRepo; + if (snapshotLocalDirPath == null) { + sourceRepo = S3Repo.create( + Paths.get(arguments.s3LocalDir), + new S3Uri(arguments.s3RepoUri), + 
arguments.s3Region ); + } else { + sourceRepo = new FileSystemRepo(snapshotLocalDirPath); + } + var repoAccessor = new DefaultSourceRepoAccessor(sourceRepo); - run( - LuceneDocumentsReader.getFactory(sourceResourceProvider), - reindexer, - workCoordinator, - arguments.initialLeaseDuration, - processManager, - sourceResourceProvider.getIndexMetadata(), - arguments.snapshotName, - arguments.indexAllowlist, - sourceResourceProvider.getShardMetadata(), - unpackerFactory, - arguments.maxShardSizeBytes, - rootDocumentContext - ); - }); + var sourceResourceProvider = ClusterProviderRegistry.getSnapshotReader(arguments.sourceVersion, sourceRepo); + + var unpackerFactory = new SnapshotShardUnpacker.Factory( + repoAccessor, + luceneDirPath, + sourceResourceProvider.getBufferSizeInBytes() + ); + + run(RunParameters.builder() + .leaseExpireTrigger(processManager) + .workCoordinator(workCoordinator) + .reindexer(reindexer) + .snapshotName(arguments.snapshotName) + .snapshotReader(sourceResourceProvider) + .snapshotUnpacker(unpackerFactory) + .documentReader(LuceneDocumentsReader.getFactory(sourceResourceProvider)) + .indexAllowlist(arguments.indexAllowlist) + .maxInitialLeaseDuration(arguments.initialLeaseDuration) + .maxShardSizeBytes(arguments.maxShardSizeBytes) + .tracingContext(context) + .build()); + } catch (Exception e) { + log.atError().setMessage("Unexpected error running RfsWorker").setCause(e).log(); + throw e; } } @@ -255,41 +259,38 @@ private static RootDocumentMigrationContext makeRootContext(Args arguments, Stri return new RootDocumentMigrationContext(otelSdk, compositeContextTracker); } - public static DocumentsRunner.CompletionStatus run( - Function readerFactory, - DocumentReindexer reindexer, - IWorkCoordinator workCoordinator, - Duration maxInitialLeaseDuration, - LeaseExpireTrigger leaseExpireTrigger, - IndexMetadata.Factory indexMetadataFactory, - String snapshotName, - List indexAllowlist, - ShardMetadata.Factory shardMetadataFactory, - SnapshotShardUnpacker.Factory unpackerFactory, - long maxShardSizeBytes, - RootDocumentMigrationContext rootDocumentContext - ) throws IOException, InterruptedException, NoWorkLeftException { - var scopedWorkCoordinator = new ScopedWorkCoordinator(workCoordinator, leaseExpireTrigger); + public static DocumentsRunner.CompletionStatus run(RunParameters params) throws Exception { + var scopedWorkCoordinator = new ScopedWorkCoordinator(params.workCoordinator, params.leaseExpireTrigger); confirmShardPrepIsComplete( - indexMetadataFactory, - snapshotName, - indexAllowlist, + params.snapshotReader.getIndexMetadata(), + params.snapshotName, + params.indexAllowlist, scopedWorkCoordinator, - rootDocumentContext + params.tracingContext ); - if (!workCoordinator.workItemsArePending( - rootDocumentContext.getWorkCoordinationContext()::createItemsPendingContext + if (!params.workCoordinator.workItemsArePending( + params.tracingContext.getWorkCoordinationContext()::createItemsPendingContext )) { throw new NoWorkLeftException("No work items are pending/all work items have been processed. 
Returning."); } - return new DocumentsRunner(scopedWorkCoordinator, maxInitialLeaseDuration, (name, shard) -> { - var shardMetadata = shardMetadataFactory.fromRepo(snapshotName, name, shard); + BiFunction shardFactory = (name, shard) -> { + var shardMetadataFactory = params.snapshotReader.getShardMetadata(); + var shardMetadata = shardMetadataFactory.fromRepo(params.snapshotName, name, shard); log.info("Shard size: " + shardMetadata.getTotalSizeBytes()); - if (shardMetadata.getTotalSizeBytes() > maxShardSizeBytes) { - throw new DocumentsRunner.ShardTooLargeException(shardMetadata.getTotalSizeBytes(), maxShardSizeBytes); + if (shardMetadata.getTotalSizeBytes() > params.maxShardSizeBytes) { + throw new DocumentsRunner.ShardTooLargeException(shardMetadata.getTotalSizeBytes(), params.maxShardSizeBytes); } return shardMetadata; - }, unpackerFactory, readerFactory, reindexer).migrateNextShard(rootDocumentContext::createReindexContext); + }; + var runner = new DocumentsRunner( + scopedWorkCoordinator, + params.maxInitialLeaseDuration, + shardFactory, + params.snapshotUnpacker, + params.documentReader, + params.reindexer); + var migrationStatus = runner.migrateNextShard(params.tracingContext::createReindexContext); + return migrationStatus; } private static void confirmShardPrepIsComplete( @@ -329,8 +330,32 @@ private static void confirmShardPrepIsComplete( .log(); Thread.sleep(lockRenegotiationMillis); lockRenegotiationMillis *= 2; - continue; } } } + + @Builder + static class RunParameters { + @NonNull + final LeaseExpireTrigger leaseExpireTrigger; + @NonNull + final IWorkCoordinator workCoordinator; + @NonNull + final String snapshotName; + @NonNull + final ClusterSnapshotReader snapshotReader; + @NonNull + final SnapshotShardUnpacker.Factory snapshotUnpacker; + @NonNull + final Function documentReader; + @NonNull + final DocumentReindexer reindexer; + @NonNull + final List indexAllowlist; + @NonNull + final Duration maxInitialLeaseDuration; + final long maxShardSizeBytes; + @NonNull + final RootDocumentMigrationContext tracingContext; + } } diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java index 50f162cb2..e846ce7c3 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ParallelDocumentMigrationsTest.java @@ -24,9 +24,6 @@ import org.opensearch.migrations.snapshot.creation.tracing.SnapshotTestContext; import com.rfs.common.FileSystemRepo; -import com.rfs.common.FileSystemSnapshotCreator; -import com.rfs.common.OpenSearchClient; -import com.rfs.common.http.ConnectionContextTestParams; import com.rfs.framework.PreloadedSearchClusterContainer; import com.rfs.framework.SearchClusterContainer; import lombok.Lombok; @@ -99,21 +96,16 @@ public void testDocumentMigration( return null; }, executorService)).join(); - final var SNAPSHOT_NAME = "test_snapshot"; + var args = new CreateSnapshot.Args(); + args.snapshotName = "test_snapshot"; + args.fileSystemRepoPath = SearchClusterContainer.CLUSTER_SNAPSHOT_DIR; + args.sourceArgs.host = esSourceContainer.getUrl(); + + var snapshotCreator = new CreateSnapshot(args, testSnapshotContext.createSnapshotCreateContext()); + snapshotCreator.run(); + + final List INDEX_ALLOWLIST = List.of(); - CreateSnapshot.run( - c -> new FileSystemSnapshotCreator( - SNAPSHOT_NAME, - c, - SearchClusterContainer.CLUSTER_SNAPSHOT_DIR, - 
testSnapshotContext.createSnapshotCreateContext() - ), - new OpenSearchClient(ConnectionContextTestParams.builder() - .host(esSourceContainer.getUrl()) - .build() - .toConnectionContext()), - false - ); var tempDir = Files.createTempDirectory("opensearchMigrationReindexFromSnapshot_test_snapshot"); try { esSourceContainer.copySnapshotData(tempDir.toString()); @@ -127,7 +119,7 @@ public void testDocumentMigration( CompletableFuture.supplyAsync( () -> migrateDocumentsSequentially( sourceRepo, - SNAPSHOT_NAME, + args.snapshotName, INDEX_ALLOWLIST, osTargetContainer.getUrl(), runCounter, diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ProcessLifecycleTest.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ProcessLifecycleTest.java index 1bfac8a54..1c06d167e 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ProcessLifecycleTest.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/ProcessLifecycleTest.java @@ -20,9 +20,6 @@ import org.opensearch.migrations.testutils.ToxiProxyWrapper; import org.opensearch.testcontainers.OpensearchContainer; -import com.rfs.common.FileSystemSnapshotCreator; -import com.rfs.common.OpenSearchClient; -import com.rfs.common.http.ConnectionContextTestParams; import com.rfs.framework.PreloadedSearchClusterContainer; import com.rfs.framework.SearchClusterContainer; import eu.rekawek.toxiproxy.model.ToxicDirection; @@ -100,19 +97,14 @@ public void testProcessExitsAsExpected(String failAfterString, int expectedExitC return null; })).join(); - CreateSnapshot.run( - c -> new FileSystemSnapshotCreator( - SNAPSHOT_NAME, - c, - SearchClusterContainer.CLUSTER_SNAPSHOT_DIR, - testSnapshotContext.createSnapshotCreateContext() - ), - new OpenSearchClient(ConnectionContextTestParams.builder() - .host(esSourceContainer.getUrl()) - .build() - .toConnectionContext()), - false - ); + var args = new CreateSnapshot.Args(); + args.snapshotName = SNAPSHOT_NAME; + args.fileSystemRepoPath = SearchClusterContainer.CLUSTER_SNAPSHOT_DIR; + args.sourceArgs.host = esSourceContainer.getUrl(); + + var snapshotCreator = new CreateSnapshot(args, testSnapshotContext.createSnapshotCreateContext()); + snapshotCreator.run(); + esSourceContainer.copySnapshotData(tempDirSnapshot.toString()); int actualExitCode = runProcessAgainstToxicTarget(tempDirSnapshot, tempDirLucene, proxyContainer, failHow); diff --git a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/SourceTestBase.java b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/SourceTestBase.java index c181ab6cd..5fed0ee79 100644 --- a/DocumentsFromSnapshotMigration/src/test/java/com/rfs/SourceTestBase.java +++ b/DocumentsFromSnapshotMigration/src/test/java/com/rfs/SourceTestBase.java @@ -23,6 +23,7 @@ import org.opensearch.migrations.cluster.ClusterProviderRegistry; import org.opensearch.migrations.reindexer.tracing.DocumentMigrationTestContext; +import com.rfs.RfsMigrateDocuments.RunParameters; import com.rfs.cms.CoordinateWorkHttpClient; import com.rfs.cms.LeaseExpireTrigger; import com.rfs.cms.OpenSearchWorkCoordinator; @@ -189,36 +190,37 @@ public static DocumentsRunner.CompletionStatus migrateDocumentsWithOneWorker( final var nextClockShift = (int) (clockJitter.nextDouble() * ms_window) - (ms_window / 2); log.info("nextClockShift=" + nextClockShift); - Function readerFactory = path -> new FilteredLuceneDocumentsReader(path, sourceResourceProvider.getSoftDeletesPossible(), sourceResourceProvider.getSoftDeletesFieldData(), terminatingDocumentFilter); - return RfsMigrateDocuments.run( - 
readerFactory, - new DocumentReindexer(new OpenSearchClient(ConnectionContextTestParams.builder() + try (var workCoordinator = new OpenSearchWorkCoordinator( + new CoordinateWorkHttpClient(ConnectionContextTestParams.builder() .host(targetAddress) - .compressionEnabled(compressionEnabled) .build() - .toConnectionContext()), 1000, Long.MAX_VALUE, 1), - new OpenSearchWorkCoordinator( - new CoordinateWorkHttpClient(ConnectionContextTestParams.builder() + .toConnectionContext()), + TOLERABLE_CLIENT_SERVER_CLOCK_DIFFERENCE_SECONDS, + UUID.randomUUID().toString(), + Clock.offset(Clock.systemUTC(), Duration.ofMillis(nextClockShift)) + )) { + return RfsMigrateDocuments.run(RunParameters.builder() + .leaseExpireTrigger(processManager) + .workCoordinator(workCoordinator) + .snapshotName(snapshotName) + .snapshotReader(sourceResourceProvider) + .snapshotUnpacker(unpackerFactory) + .documentReader(readerFactory) + .reindexer(new DocumentReindexer(new OpenSearchClient(ConnectionContextTestParams.builder() .host(targetAddress) + .compressionEnabled(compressionEnabled) .build() - .toConnectionContext()), - TOLERABLE_CLIENT_SERVER_CLOCK_DIFFERENCE_SECONDS, - UUID.randomUUID().toString(), - Clock.offset(Clock.systemUTC(), Duration.ofMillis(nextClockShift)) - ), - Duration.ofMinutes(10), - processManager, - sourceResourceProvider.getIndexMetadata(), - snapshotName, - indexAllowlist, - sourceResourceProvider.getShardMetadata(), - unpackerFactory, - MAX_SHARD_SIZE_BYTES, - context - ); + .toConnectionContext()), 1000, Long.MAX_VALUE, 1)) + .maxInitialLeaseDuration(Duration.ofMinutes(10)) + .indexAllowlist(indexAllowlist) + .maxShardSizeBytes(MAX_SHARD_SIZE_BYTES) + .tracingContext(context) + .build() + ); + } } finally { deleteTree(tempDir); } diff --git a/RFS/src/main/java/com/rfs/common/TryHandlePhaseFailure.java b/RFS/src/main/java/com/rfs/common/TryHandlePhaseFailure.java deleted file mode 100644 index 120b22bce..000000000 --- a/RFS/src/main/java/com/rfs/common/TryHandlePhaseFailure.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.rfs.common; - -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class TryHandlePhaseFailure { - @FunctionalInterface - public interface TryBlock { - void run() throws Exception; - } - - public static void executeWithTryCatch(TryBlock tryBlock) throws Exception { - try { - tryBlock.run(); - } catch (Exception e) { - log.atError().setMessage("Unexpected error running RfsWorker").setCause(e).log(); - throw e; - } - } -} From 750f43837ff0c21e6e10b8bdfaab52fb8fee14de Mon Sep 17 00:00:00 2001 From: Andre Kurait Date: Wed, 18 Sep 2024 13:40:15 -0500 Subject: [PATCH 32/38] Extend acceptable docker container startup time in tests Signed-off-by: Andre Kurait --- RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java | 1 - .../java/com/rfs/framework/SearchClusterContainer.java | 4 ++-- .../migrations/replay/http/retries/HttpRetryTest.java | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java index e2c594c70..42f5c8581 100644 --- a/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java +++ b/RFS/src/test/java/com/rfs/cms/WorkCoordinatorTest.java @@ -84,7 +84,6 @@ private JsonNode searchForExpiredDocs(long expirationEpochSeconds) { @Test public void testAcquireLeaseHasNoUnnecessaryConflicts() throws Exception { - log.error("Hello"); var testContext = WorkCoordinationTestContext.factory().withAllTracking(); final var NUM_DOCS = 100; try (var workCoordinator = new 
OpenSearchWorkCoordinator(httpClientSupplier.get(), 3600, "docCreatorWorker")) { diff --git a/RFS/src/testFixtures/java/com/rfs/framework/SearchClusterContainer.java b/RFS/src/testFixtures/java/com/rfs/framework/SearchClusterContainer.java index 13c51cae4..69458705a 100644 --- a/RFS/src/testFixtures/java/com/rfs/framework/SearchClusterContainer.java +++ b/RFS/src/testFixtures/java/com/rfs/framework/SearchClusterContainer.java @@ -72,7 +72,7 @@ public SearchClusterContainer(final ContainerVersion version) { super(DockerImageName.parse(version.imageName)); this.withExposedPorts(9200, 9300) .withEnv(version.getInitializationType().getEnvVariables()) - .waitingFor(Wait.forHttp("/").forPort(9200).forStatusCode(200).withStartupTimeout(Duration.ofMinutes(1))); + .waitingFor(Wait.forHttp("/").forPort(9200).forStatusCode(200).withStartupTimeout(Duration.ofMinutes(5))); this.containerVersion = version; } @@ -130,7 +130,7 @@ public static class ContainerVersion { public ContainerVersion(final String imageName, final Version version, INITIALIZATION_FLAVOR initializationType) { this.imageName = imageName; - this.version = version; + this.version = version; this.initializationType = initializationType; } diff --git a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java index a77c41bb3..feff8bc01 100644 --- a/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java +++ b/TrafficCapture/trafficReplayer/src/test/java/org/opensearch/migrations/replay/http/retries/HttpRetryTest.java @@ -218,7 +218,7 @@ public void testMalformedResponseFailuresNeverGiveUp() throws Exception { var server = new GenericContainer<>(HTTPD_IMAGE) .withNetwork(network) .withNetworkAliases(SERVERNAME_ALIAS) - .waitingFor(Wait.forHttp("/").forStatusCode(200)); + .waitingFor(Wait.forHttp("/").forStatusCode(200)).withStartupTimeout(Duration.ofMinutes(5)); var toxiproxy = new ToxiProxyWrapper(network)) { server.start(); From 81928aa6d792045819b67ab6edc5d1ac153f67ff Mon Sep 17 00:00:00 2001 From: Mikayla Thompson Date: Wed, 18 Sep 2024 13:17:08 -0600 Subject: [PATCH 33/38] Add more Migration Console test coverage (#971) Signed-off-by: Mikayla Thompson --- .../console_link/models/factories.py | 16 +- .../lib/console_link/tests/test_backfill.py | 50 ++++- .../lib/console_link/tests/test_cluster.py | 62 ++++++ .../lib/console_link/tests/test_kafka.py | 194 ++++++++++++++++++ .../lib/console_link/tests/test_replay.py | 22 +- 5 files changed, 334 insertions(+), 10 deletions(-) create mode 100644 TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_kafka.py diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/factories.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/factories.py index 57d2765b7..5ea256430 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/factories.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/factories.py @@ -67,7 +67,11 @@ def get_replayer(config: Dict): def get_kafka(config: Dict): if 'msk' in config: return MSK(config) - return StandardKafka(config) + if 'standard' in config: + return StandardKafka(config) + 
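get_kafka now mirrors the other factories: each supported flavor is selected by an explicit config key, and anything unrecognized raises instead of silently defaulting to StandardKafka. A minimal standalone sketch of that dispatch, with stand-in classes rather than the console_link models:

# Keyed factory dispatch with a loud failure for unknown flavors.
# MSK, StandardKafka, and UnsupportedKafkaError stand in for the real models.
class UnsupportedKafkaError(Exception):
    pass

class MSK:
    def __init__(self, config):
        self.config = config

class StandardKafka:
    def __init__(self, config):
        self.config = config

def get_kafka(config: dict):
    if 'msk' in config:
        return MSK(config)
    if 'standard' in config:
        return StandardKafka(config)
    # Report only the unrecognized keys, not settings common to every flavor.
    remaining = {k: v for k, v in config.items() if k != 'broker_endpoints'}
    raise UnsupportedKafkaError(', '.join(remaining.keys()))

assert isinstance(get_kafka({'broker_endpoints': 'abc', 'msk': None}), MSK)
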
config.pop("broker_endpoints", None) + logger.error(f"An unsupported kafka source type was provided: {config.keys()}") + raise UnsupportedKafkaError(', '.join(config.keys())) def get_backfill(config: Dict, source_cluster: Optional[Cluster], target_cluster: Optional[Cluster]) -> Backfill: @@ -93,10 +97,8 @@ def get_backfill(config: Dict, source_cluster: Optional[Cluster], target_cluster return ECSRFSBackfill(config=config, target_cluster=target_cluster) - logger.error(f"An unsupported metrics source type was provided: {config.keys()}") - if len(config.keys()) > 1: - raise UnsupportedBackfillTypeError(', '.join(config.keys())) - raise UnsupportedBackfillTypeError(next(iter(config.keys()))) + logger.error(f"An unsupported backfill source type was provided: {config.keys()}") + raise UnsupportedBackfillTypeError(', '.join(config.keys())) def get_metrics_source(config): @@ -106,6 +108,4 @@ def get_metrics_source(config): return CloudwatchMetricsSource(config) else: logger.error(f"An unsupported metrics source type was provided: {config.keys()}") - if len(config.keys()) > 1: - raise UnsupportedMetricsSourceError(', '.join(config.keys())) - raise UnsupportedMetricsSourceError(next(iter(config.keys()))) + raise UnsupportedMetricsSourceError(', '.join(config.keys())) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py index 565c22017..8d9c712b8 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_backfill.py @@ -2,8 +2,10 @@ import pathlib import pytest +import requests import requests_mock +from console_link.models.cluster import Cluster from console_link.models.backfill_base import Backfill, BackfillStatus from console_link.models.backfill_osi import OpenSearchIngestionBackfill from console_link.models.backfill_rfs import DockerRFSBackfill, ECSRFSBackfill @@ -131,6 +133,17 @@ def test_get_backfill_unsupported_type(): assert "fetch" in str(excinfo.value.args[1]) +def test_get_backfill_multiple_types(): + unknown_config = { + "fetch": {"data": "xyz"}, + "new_backfill": {"data": "abc"} + } + with pytest.raises(UnsupportedBackfillTypeError) as excinfo: + get_backfill(unknown_config, None, None) + assert "fetch" in excinfo.value.args[1] + assert "new_backfill" in excinfo.value.args[1] + + def test_cant_instantiate_with_multiple_types(): config = { "opensearch_ingestion": { @@ -245,7 +258,6 @@ def test_ecs_rfs_calculates_backfill_status_from_ecs_instance_statuses_running(e assert str(mocked_running_status) == value.value[1] -@pytest.mark.skip(reason="Need to implement mocking for multiple calls to endpoint") def test_ecs_rfs_get_status_deep_check(ecs_rfs_backfill, mocker): target = create_valid_cluster() mocked_instance_status = InstanceStatuses( @@ -269,3 +281,39 @@ def test_ecs_rfs_get_status_deep_check(ecs_rfs_backfill, mocker): assert BackfillStatus.RUNNING == value.value[0] assert str(mocked_instance_status) in value.value[1] assert str(total_shards) in value.value[1] + + +def test_ecs_rfs_deep_status_check_failure(ecs_rfs_backfill, mocker, caplog): + mocked_instance_status = InstanceStatuses( + desired=1, + running=1, + pending=0 + ) + mock_ecs = mocker.patch.object(ECSService, 'get_instance_statuses', autospec=True, + return_value=mocked_instance_status) + mock_api = 
mocker.patch.object(Cluster, 'call_api', side_effect=requests.exceptions.RequestException()) + result = ecs_rfs_backfill.get_status(deep_check=True) + assert "Working state index does not yet exist" in caplog.text + mock_ecs.assert_called_once() + mock_api.assert_called_once() + assert result.success + assert result.value[0] == BackfillStatus.RUNNING + + +def test_docker_backfill_not_implemented_commands(): + docker_rfs_config = { + "reindex_from_snapshot": { + "docker": None + } + } + docker_rfs_backfill = get_backfill(docker_rfs_config, None, target_cluster=create_valid_cluster()) + assert isinstance(docker_rfs_backfill, DockerRFSBackfill) + + with pytest.raises(NotImplementedError): + docker_rfs_backfill.start() + + with pytest.raises(NotImplementedError): + docker_rfs_backfill.stop() + + with pytest.raises(NotImplementedError): + docker_rfs_backfill.scale(units=3) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cluster.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cluster.py index ebbc03946..1d7bfb7cf 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cluster.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_cluster.py @@ -305,3 +305,65 @@ def test_valid_cluster_api_call_with_sigv4_auth(requests_mock, aws_credentials): assert "Signature=" in auth_header assert "es" in auth_header assert "us-east-2" in auth_header + + +def test_call_api_via_middleware(requests_mock): + cluster = create_valid_cluster(auth_type=AuthMethod.NO_AUTH) + requests_mock.get(f"{cluster.endpoint}/test_api", json={'test': True}) + + response = clusters_.call_api(cluster, '/test_api') + assert response.status_code == 200 + assert response.json() == {'test': True} + + +def test_cat_indices_with_refresh(requests_mock): + cluster = create_valid_cluster(auth_type=AuthMethod.NO_AUTH) + refresh_mock = requests_mock.get(f"{cluster.endpoint}/_refresh") + indices_mock = requests_mock.get(f"{cluster.endpoint}/_cat/indices/_all") + + clusters_.cat_indices(cluster, refresh=True) + assert refresh_mock.call_count == 1 + assert indices_mock.call_count == 1 + + +def test_clear_indices(requests_mock): + cluster = create_valid_cluster(auth_type=AuthMethod.NO_AUTH) + mock = requests_mock.delete(f"{cluster.endpoint}/*,-.*,-searchguard*,-sg7*,.migrations_working_state") + clusters_.clear_indices(cluster) + assert mock.call_count == 1 + + +def test_run_benchmark_executes_correctly_no_auth(mocker): + cluster = create_valid_cluster(auth_type=AuthMethod.NO_AUTH) + mock = mocker.patch("subprocess.run", autospec=True) + workload = "nyctaxis" + cluster.execute_benchmark_workload(workload=workload) + mock.assert_called_once_with("opensearch-benchmark execute-test --distribution-version=1.0.0 " + f"--target-host={cluster.endpoint} --workload={workload} --pipeline=benchmark-only" + " --test-mode --kill-running-processes --workload-params=target_throughput:0.5," + "bulk_size:10,bulk_indexing_clients:1,search_clients:1 " + "--client-options=verify_certs:false", shell=True) + + +def test_run_benchmark_raises_error_sigv4_auth(): + cluster = create_valid_cluster(auth_type=AuthMethod.SIGV4, details={"region": "eu-west-1", "service": "aoss"}) + workload = "nyctaxis" + with pytest.raises(NotImplementedError): + cluster.execute_benchmark_workload(workload=workload) + + +def test_run_benchmark_executes_correctly_basic_auth_and_https(mocker): + 
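These benchmark tests pin down the exact shell string handed to subprocess.run, which catches quoting and flag regressions without ever launching opensearch-benchmark. The pattern in isolation, with run_benchmark as a stand-in for the Cluster method:

# Sketch: assert on the fully composed command line instead of executing it.
from unittest import mock
import subprocess

def run_benchmark(endpoint: str, workload: str) -> None:
    command = (f"opensearch-benchmark execute-test --target-host={endpoint} "
               f"--workload={workload} --pipeline=benchmark-only")
    subprocess.run(command, shell=True)

with mock.patch("subprocess.run", autospec=True) as run_mock:
    run_benchmark("https://example:9200", "nyctaxis")
    run_mock.assert_called_once_with(
        "opensearch-benchmark execute-test --target-host=https://example:9200 "
        "--workload=nyctaxis --pipeline=benchmark-only", shell=True)
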
auth_details = {"username": "admin", "password": "Admin1"} + cluster = create_valid_cluster(auth_type=AuthMethod.BASIC_AUTH, details=auth_details) + cluster.allow_insecure = False + + mock = mocker.patch("subprocess.run", autospec=True) + workload = "nyctaxis" + cluster.execute_benchmark_workload(workload=workload) + mock.assert_called_once_with("opensearch-benchmark execute-test --distribution-version=1.0.0 " + f"--target-host={cluster.endpoint} --workload={workload} --pipeline=benchmark-only" + " --test-mode --kill-running-processes --workload-params=target_throughput:0.5," + "bulk_size:10,bulk_indexing_clients:1,search_clients:1 " + "--client-options=verify_certs:false,use_ssl:true," + f"basic_auth_user:{auth_details['username']}," + f"basic_auth_password:{auth_details['password']}", shell=True) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_kafka.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_kafka.py new file mode 100644 index 000000000..2290fda23 --- /dev/null +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_kafka.py @@ -0,0 +1,194 @@ +import pytest + +from console_link.models.factories import UnsupportedKafkaError, get_kafka +from console_link.models.kafka import Kafka, MSK, StandardKafka + + +def test_get_msk_kafka(): + config = { + "broker_endpoints": "abc", + "msk": None + } + kafka = get_kafka(config) + assert isinstance(kafka, Kafka) + assert isinstance(kafka, MSK) + + +def test_get_standard_kafka(): + config = { + "broker_endpoints": "abc", + "standard": None + } + kafka = get_kafka(config) + assert isinstance(kafka, Kafka) + assert isinstance(kafka, StandardKafka) + + +def test_unsupported_kafka_type_raises_error(): + config = { + "broker_endpoints": "abc", + "new_kafka_type": None + } + with pytest.raises(UnsupportedKafkaError) as exc_info: + get_kafka(config) + assert 'new_kafka_type' in exc_info.value.args + + +def test_no_kafka_type_raises_error(): + config = { + "broker_endpoints": "abc", + } + with pytest.raises(UnsupportedKafkaError): + get_kafka(config) + + +def test_multiple_kafka_types_raises_error(): + config = { + "broker_endpoints": "abc", + "msk": None, + "standard": None + } + with pytest.raises(ValueError) as exc_info: + get_kafka(config) + + assert "More than one value is present" in exc_info.value.args[0]['kafka'][0] + + +def test_msk_kafka_create_topic(mocker): + config = { + "broker_endpoints": "abc", + "msk": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.create_topic(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-topics.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--create', + '--topic', 'new_topic', '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties' + ], capture_output=True, text=True, check=True) + + +def test_standard_kafka_create_topic(mocker): + config = { + "broker_endpoints": "abc", + "standard": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.create_topic(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-topics.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--create', + '--topic', 'new_topic' + ], capture_output=True, text=True, check=True) + + +def 
test_msk_kafka_delete_topic(mocker): + config = { + "broker_endpoints": "abc", + "msk": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.delete_topic(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-topics.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--delete', + '--topic', 'new_topic', '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties' + ], capture_output=True, text=True, check=True) + + +def test_standard_kafka_delete_topic(mocker): + config = { + "broker_endpoints": "abc", + "standard": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.delete_topic(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-topics.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--delete', + '--topic', 'new_topic' + ], capture_output=True, text=True, check=True) + + +def test_msk_kafka_describe_topic(mocker): + config = { + "broker_endpoints": "abc", + "msk": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.describe_topic_records(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-run-class.sh', 'kafka.tools.GetOffsetShell', + '--broker-list', f"{config['broker_endpoints']}", + '--topic', 'new_topic', + '--time', '-1', + '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties' + ], capture_output=True, text=True, check=True) + + +def test_standard_kafka_describe_topic(mocker): + config = { + "broker_endpoints": "abc", + "standard": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.describe_topic_records(topic_name='new_topic') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-run-class.sh', 'kafka.tools.GetOffsetShell', + '--broker-list', f"{config['broker_endpoints']}", + '--topic', 'new_topic', + '--time', '-1' + ], capture_output=True, text=True, check=True) + + +def test_msk_kafka_describe_group(mocker): + config = { + "broker_endpoints": "abc", + "msk": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.describe_consumer_group(group_name='new_group') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-consumer-groups.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--timeout', '100000', '--describe', + '--group', 'new_group', + '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties' + ], capture_output=True, text=True, check=True) + + +def test_standard_kafka_describe_group(mocker): + config = { + "broker_endpoints": "abc", + "standard": None + } + kafka = get_kafka(config) + mock = mocker.patch('subprocess.run', autospec=True) + result = kafka.describe_consumer_group(group_name='new_group') + + assert result.success + mock.assert_called_once_with( + ['/root/kafka-tools/kafka/bin/kafka-consumer-groups.sh', + '--bootstrap-server', f"{config['broker_endpoints']}", '--timeout', '100000', '--describe', + '--group', 'new_group', + ], capture_output=True, text=True, check=True) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_replay.py 
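The paired MSK and Standard cases in test_kafka.py differ only by the trailing --command-config arguments, so they could also be collapsed with pytest.mark.parametrize. A sketch under that assumption, with build_delete_command standing in for the model methods:

# Sketch: one parametrized test for both Kafka flavors, which differ only in
# whether the MSK IAM auth suffix is appended to the command list.
import pytest

MSK_AUTH = ['--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties']

def build_delete_command(brokers: str, topic: str, msk: bool) -> list:
    base = ['kafka-topics.sh', '--bootstrap-server', brokers, '--delete', '--topic', topic]
    return base + (MSK_AUTH if msk else [])

@pytest.mark.parametrize("msk, expected_tail", [
    (True, ['--topic', 'new_topic'] + MSK_AUTH),
    (False, ['--topic', 'new_topic']),
])
def test_delete_topic_command(msk, expected_tail):
    command = build_delete_command('abc', 'new_topic', msk)
    assert command[-len(expected_tail):] == expected_tail
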
b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_replay.py index bfa374fe5..9d534af76 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_replay.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_replay.py @@ -6,9 +6,10 @@ import console_link.middleware.replay as replay_ from console_link.models.ecs_service import ECSService -from console_link.models.factories import get_replayer +from console_link.models.factories import UnsupportedReplayerError, get_replayer from console_link.models.replayer_base import Replayer from console_link.models.replayer_ecs import ECSReplayer +from console_link.models.replayer_docker import DockerReplayer TEST_DATA_DIRECTORY = pathlib.Path(__file__).parent / "data" AWS_REGION = "us-east-1" @@ -116,3 +117,22 @@ def test_replayer_describe_as_json(): success, output = replay_.describe(replayer, as_json=True) assert success assert json.loads(output) == config + + +def test_get_docker_replayer(): + config = { + "docker": None + } + replayer = get_replayer(config) + assert isinstance(replayer, DockerReplayer) + + +def test_nonexistent_replayer_type(): + config = { + "new_replayer_type": { + "setting": "value" + } + } + with pytest.raises(UnsupportedReplayerError) as exc_info: + get_replayer(config) + assert 'new_replayer_type' in exc_info.value.args From eefc80bde1f2b2a410e2986021c35be6b9c76a42 Mon Sep 17 00:00:00 2001 From: Mikayla Thompson Date: Wed, 18 Sep 2024 14:19:39 -0600 Subject: [PATCH 34/38] Fix Sonarqube code smells in console lib (#973) Signed-off-by: Mikayla Thompson --- .../console_link/console_link/models/kafka.py | 21 ++++++----- .../console_link/models/metadata.py | 24 ++++++------ .../console_link/models/metrics_source.py | 37 +++++++++---------- .../console_link/tests/test_metrics_source.py | 4 +- 4 files changed, 43 insertions(+), 43 deletions(-) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/kafka.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/kafka.py index 38017ab6b..1791ac8e9 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/kafka.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/kafka.py @@ -29,6 +29,9 @@ } } +KAFKA_TOPICS_COMMAND = '/root/kafka-tools/kafka/bin/kafka-topics.sh' +MSK_AUTH_PARAMETERS = ['--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties'] + def get_result_for_command(command: List[str], operation_name: str) -> CommandResult: try: @@ -103,28 +106,26 @@ def __init__(self, config): super().__init__(config) def delete_topic(self, topic_name='logging-traffic-topic') -> CommandResult: - command = ['/root/kafka-tools/kafka/bin/kafka-topics.sh', '--bootstrap-server', f'{self.brokers}', '--delete', - '--topic', f'{topic_name}', '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties'] + command = [KAFKA_TOPICS_COMMAND, '--bootstrap-server', f'{self.brokers}', '--delete', + '--topic', f'{topic_name}'] + MSK_AUTH_PARAMETERS logger.info(f"Executing command: {command}") return get_result_for_command(command, "Delete Topic") def create_topic(self, topic_name='logging-traffic-topic') -> CommandResult: - command = ['/root/kafka-tools/kafka/bin/kafka-topics.sh', '--bootstrap-server', f'{self.brokers}', '--create', - '--topic', 
f'{topic_name}', '--command-config', '/root/kafka-tools/aws/msk-iam-auth.properties'] + command = [KAFKA_TOPICS_COMMAND, '--bootstrap-server', f'{self.brokers}', '--create', + '--topic', f'{topic_name}'] + MSK_AUTH_PARAMETERS logger.info(f"Executing command: {command}") return get_result_for_command(command, "Create Topic") def describe_consumer_group(self, group_name='logging-group-default') -> CommandResult: command = ['/root/kafka-tools/kafka/bin/kafka-consumer-groups.sh', '--bootstrap-server', f'{self.brokers}', - '--timeout', '100000', '--describe', '--group', f'{group_name}', '--command-config', - '/root/kafka-tools/aws/msk-iam-auth.properties'] + '--timeout', '100000', '--describe', '--group', f'{group_name}'] + MSK_AUTH_PARAMETERS logger.info(f"Executing command: {command}") return get_result_for_command(command, "Describe Consumer Group") def describe_topic_records(self, topic_name='logging-traffic-topic') -> CommandResult: command = ['/root/kafka-tools/kafka/bin/kafka-run-class.sh', 'kafka.tools.GetOffsetShell', '--broker-list', - f'{self.brokers}', '--topic', f'{topic_name}', '--time', '-1', '--command-config', - '/root/kafka-tools/aws/msk-iam-auth.properties'] + f'{self.brokers}', '--topic', f'{topic_name}', '--time', '-1'] + MSK_AUTH_PARAMETERS logger.info(f"Executing command: {command}") result = get_result_for_command(command, "Describe Topic Records") if result.success and result.value: @@ -142,13 +143,13 @@ def __init__(self, config): super().__init__(config) def delete_topic(self, topic_name='logging-traffic-topic') -> CommandResult: - command = ['/root/kafka-tools/kafka/bin/kafka-topics.sh', '--bootstrap-server', f'{self.brokers}', '--delete', + command = [KAFKA_TOPICS_COMMAND, '--bootstrap-server', f'{self.brokers}', '--delete', '--topic', f'{topic_name}'] logger.info(f"Executing command: {command}") return get_result_for_command(command, "Delete Topic") def create_topic(self, topic_name='logging-traffic-topic') -> CommandResult: - command = ['/root/kafka-tools/kafka/bin/kafka-topics.sh', '--bootstrap-server', f'{self.brokers}', '--create', + command = [KAFKA_TOPICS_COMMAND, '--bootstrap-server', f'{self.brokers}', '--create', '--topic', f'{topic_name}'] logger.info(f"Executing command: {command}") return get_result_for_command(command, "Create Topic") diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py index ddf5617f3..bf4993854 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metadata.py @@ -136,29 +136,29 @@ def _init_from_fs_snapshot(self, snapshot: FileSystemSnapshot) -> None: self._snapshot_location = "fs" self._repo_path = snapshot.repo_path - def _appendArgs(self, commands: Dict[str, Any], args_to_add: List[str]) -> None: + def _append_args(self, commands: Dict[str, Any], args_to_add: List[str]) -> None: if args_to_add is None: return - def isCommand(arg: str) -> bool: + def is_command(arg: Optional[str]) -> bool: if arg is None: return False return arg.startswith('--') or arg.startswith('-') - def isValue(arg: str) -> bool: + def is_value(arg: Optional[str]) -> bool: if arg is None: return False - return not isCommand(arg) + return not is_command(arg) i = 0 while i < len(args_to_add): arg = 
args_to_add[i] - nextArg = args_to_add[i + 1] if (i + 1 < len(args_to_add)) else None + next_arg = args_to_add[i + 1] if (i + 1 < len(args_to_add)) else None - if isCommand(arg) and isValue(nextArg): - commands[arg] = nextArg + if is_command(arg) and is_value(next_arg): + commands[arg] = next_arg i += 2 # Move past the command and value - elif isCommand(arg): + elif is_command(arg): commands[arg] = None i += 1 # Move past the command, its a flag else: @@ -167,13 +167,13 @@ def isValue(arg: str) -> bool: def evaluate(self, extra_args=None) -> CommandResult: logger.info("Starting metadata migration") - return self.migrateOrEvaluate("evaluate", extra_args) + return self.migrate_or_evaluate("evaluate", extra_args) def migrate(self, extra_args=None) -> CommandResult: logger.info("Starting metadata migration") - return self.migrateOrEvaluate("migrate", extra_args) + return self.migrate_or_evaluate("migrate", extra_args) - def migrateOrEvaluate(self, command: str, extra_args=None) -> CommandResult: + def migrate_or_evaluate(self, command: str, extra_args=None) -> CommandResult: command_base = "/root/metadataMigration/bin/MetadataMigration" command_args = {} @@ -232,7 +232,7 @@ def migrateOrEvaluate(self, command: str, extra_args=None) -> CommandResult: command_args.update({"--source-version": self._source_cluster_version}) # Extra args might not be represented with dictionary, so convert args to list and append commands - self._appendArgs(command_args, extra_args) + self._append_args(command_args, extra_args) command_runner = CommandRunner(command_base, command_args, sensitive_fields=["--target-password"]) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metrics_source.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metrics_source.py index d24eaf8f2..fccdb9cf4 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metrics_source.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/console_link/models/metrics_source.py @@ -74,9 +74,9 @@ def get_metric_data( component: Component, metric: str, statistic: MetricStatistic, - startTime: datetime, + start_time: datetime, period_in_seconds: int = 60, - endTime: Optional[datetime] = None, + end_time: Optional[datetime] = None, dimensions: Optional[Dict] = None, ) -> List[Tuple[str, float]]: raise NotImplementedError @@ -147,21 +147,21 @@ def get_metric_data( component: Component, metric: str, statistic: MetricStatistic, - startTime: datetime, + start_time: datetime, period_in_seconds: int = 60, - endTime: Optional[datetime] = None, + end_time: Optional[datetime] = None, dimensions: Optional[Dict[str, str]] = None, ) -> List[Tuple[str, float]]: logger.info(f"{self.__class__.__name__}.get_metric_data called with {component=}, {metric=}, {statistic=}," - f"{startTime=}, {period_in_seconds=}, {endTime=}, {dimensions=}") + f"{start_time=}, {period_in_seconds=}, {end_time=}, {dimensions=}") aws_dimensions = [{"Name": "OTelLib", "Value": component.value}] if dimensions: aws_dimensions += [{"Name": k, "Value": v} for k, v in dimensions.items()] logger.debug(f"AWS Dimensions set to: {aws_dimensions}") - if not endTime: - endTime = datetime.now() - logger.debug(f"No endTime provided, using current time: {endTime}") + if not end_time: + end_time = datetime.now() + logger.debug(f"No endTime provided, using current time: {end_time}") response = 
self.client.get_metric_data( MetricDataQueries=[ { @@ -177,8 +177,8 @@ def get_metric_data( }, }, ], - StartTime=startTime, - EndTime=endTime, + StartTime=start_time, + EndTime=end_time, ScanBy="TimestampAscending", ) raise_for_aws_api_error(response) @@ -199,8 +199,7 @@ def prometheus_component_names(c: Component) -> str: return "capture" elif c == Component.REPLAYER: return "replay" - else: - raise ValueError(f"Unsupported component: {c}") + raise ValueError(f"Unsupported component: {c}") class PrometheusMetricsSource(MetricsSource): @@ -235,21 +234,21 @@ def get_metric_data( component: Component, metric: str, statistic: MetricStatistic, - startTime: datetime, + start_time: datetime, period_in_seconds: int = 60, - endTime: Optional[datetime] = None, + end_time: Optional[datetime] = None, dimensions: Optional[Dict] = None, ) -> List[Tuple[str, float]]: logger.info(f"{self.__class__.__name__} get_metric_data called with {component=}, {metric=}, {statistic=}," - f"{startTime=}, {period_in_seconds=}, {endTime=}, {dimensions=}") - if not endTime: - endTime = datetime.now() + f"{start_time=}, {period_in_seconds=}, {end_time=}, {dimensions=}") + if not end_time: + end_time = datetime.now() r = requests.get( f"{self.endpoint}/api/v1/query_range", params={ # type: ignore "query": f'{metric}{{exported_job="{prometheus_component_names(component)}"}}', - "start": startTime.timestamp(), - "end": endTime.timestamp(), + "start": start_time.timestamp(), + "end": end_time.timestamp(), "step": period_in_seconds, }, ) diff --git a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metrics_source.py b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metrics_source.py index 417c20102..d353f6e11 100644 --- a/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metrics_source.py +++ b/TrafficCapture/dockerSolution/src/main/docker/migrationConsole/lib/console_link/tests/test_metrics_source.py @@ -151,7 +151,7 @@ def test_cloudwatch_metrics_get_metric_data(cw_ms, cw_stubber): cw_ms.client = cw_stubber.client metrics = cw_ms.get_metric_data(Component.CAPTUREPROXY, "kafkaCommitCount", - MetricStatistic.Average, startTime=datetime.datetime.now()) + MetricStatistic.Average, start_time=datetime.datetime.now()) assert metrics == mock_metric_data @@ -184,5 +184,5 @@ def test_prometheus_get_metric_for_nonexistent_component(prometheus_ms): with pytest.raises(ValueError): prometheus_ms.get_metric_data( Component(3), "kafkaCommitCount", - MetricStatistic.Average, startTime=datetime.datetime.now() + MetricStatistic.Average, start_time=datetime.datetime.now() ) From bb71a2706f6f30dc1541632cec3c2f312632840c Mon Sep 17 00:00:00 2001 From: Andre Kurait Date: Wed, 18 Sep 2024 15:20:28 -0500 Subject: [PATCH 35/38] Address SonarQube findings in transformations Signed-off-by: Andre Kurait --- .../java/com/rfs/common/OpenSearchClient.java | 8 ++--- .../org/opensearch/migrations/Flavor.java | 4 +-- .../org/opensearch/migrations/Version.java | 30 +++++++++-------- .../migrations/VersionMatchers.java | 16 ++++------ .../transformation/CanApplyResult.java | 5 +-- .../rules/IndexMappingTypeRemoval.java | 32 ++++++++++--------- .../opensearch/migrations/VersionTest.java | 6 ++-- 7 files changed, 51 insertions(+), 50 deletions(-) diff --git a/RFS/src/main/java/com/rfs/common/OpenSearchClient.java b/RFS/src/main/java/com/rfs/common/OpenSearchClient.java index a091d8fb5..f63d11da4 100644 --- 
a/RFS/src/main/java/com/rfs/common/OpenSearchClient.java +++ b/RFS/src/main/java/com/rfs/common/OpenSearchClient.java @@ -95,12 +95,12 @@ private Version versionFromResponse(HttpResponse resp) { .major(Integer.parseInt(parts[0])) .minor(Integer.parseInt(parts[1])) .patch(parts.length > 2 ? Integer.parseInt(parts[2]) : 0); - + var distroNode = versionNode.get("distribution"); if (distroNode != null && distroNode.asText().equalsIgnoreCase("opensearch")) { - versionBuilder.flavor(Flavor.OpenSearch); - } else { - versionBuilder.flavor(Flavor.Elasticsearch); + versionBuilder.flavor(Flavor.OPENSEARCH); + } else { + versionBuilder.flavor(Flavor.ELASTICSEARCH); } return versionBuilder.build(); } catch (Exception e) { diff --git a/transformation/src/main/java/org/opensearch/migrations/Flavor.java b/transformation/src/main/java/org/opensearch/migrations/Flavor.java index 151d1b7fa..f835c92dc 100644 --- a/transformation/src/main/java/org/opensearch/migrations/Flavor.java +++ b/transformation/src/main/java/org/opensearch/migrations/Flavor.java @@ -6,8 +6,8 @@ @RequiredArgsConstructor @Getter public enum Flavor { - Elasticsearch("ES"), - OpenSearch("OS"); + ELASTICSEARCH("ES"), + OPENSEARCH("OS"); final String shorthand; } diff --git a/transformation/src/main/java/org/opensearch/migrations/Version.java b/transformation/src/main/java/org/opensearch/migrations/Version.java index ec11678b5..329a42e78 100644 --- a/transformation/src/main/java/org/opensearch/migrations/Version.java +++ b/transformation/src/main/java/org/opensearch/migrations/Version.java @@ -1,5 +1,7 @@ package org.opensearch.migrations; +import java.util.Arrays; + import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -23,20 +25,20 @@ public static Version fromString(final String raw) throws RuntimeException { var builder = Version.builder(); var remainingString = raw.toLowerCase(); - for (var flavor : Flavor.values()) { - if (remainingString.startsWith(flavor.name().toLowerCase())) { - remainingString = remainingString.substring(flavor.name().length()); - builder.flavor(flavor); - break; - } else if (remainingString.startsWith(flavor.shorthand.toLowerCase())) { - remainingString = remainingString.substring(flavor.shorthand.length()); - builder.flavor(flavor); - break; - } - } + var finalRemainingString = remainingString; + var matchedFlavor = Arrays.stream(Flavor.values()) + .filter(flavor -> finalRemainingString.startsWith(flavor.name().toLowerCase()) || + finalRemainingString.startsWith(flavor.shorthand.toLowerCase())) + .findFirst(); - if (remainingString.equals(raw.toLowerCase())) { - throw new RuntimeException("Unable to determine build flavor from '" + raw +"'"); + if (matchedFlavor.isPresent()) { + Flavor flavor = matchedFlavor.get(); + remainingString = remainingString.startsWith(flavor.name().toLowerCase()) ? 
+ remainingString.substring(flavor.name().length()) : + remainingString.substring(flavor.shorthand.length()); + builder.flavor(flavor); + } else { + throw new IllegalArgumentException("Unable to determine build flavor from '" + raw +"'"); } try { @@ -59,7 +61,7 @@ public static Version fromString(final String raw) throws RuntimeException { } return builder.build(); } catch (Exception e) { - throw new RuntimeException("Unable to parse version numbers from the string '" + raw + "'\r\n", e); + throw new IllegalArgumentException("Unable to parse version numbers from the string '" + raw + "'\r\n", e); } } } diff --git a/transformation/src/main/java/org/opensearch/migrations/VersionMatchers.java b/transformation/src/main/java/org/opensearch/migrations/VersionMatchers.java index 4f9efc7c8..6ebc77fc1 100644 --- a/transformation/src/main/java/org/opensearch/migrations/VersionMatchers.java +++ b/transformation/src/main/java/org/opensearch/migrations/VersionMatchers.java @@ -26,18 +26,14 @@ private static Predicate matchesMajorVersion(final Version version) { } private static Predicate matchesMinorVersion(final Version version) { - return other -> { - return matchesMajorVersion(version) - .and(other2 -> version.getMinor() == other2.getMinor()) - .test(other); - }; + return other -> matchesMajorVersion(version) + .and(other2 -> version.getMinor() == other2.getMinor()) + .test(other); } private static Predicate equalOrGreaterThanMinorVersion(final Version version) { - return other -> { - return matchesMajorVersion(version) - .and(other2 -> version.getMinor() <= other2.getMinor()) - .test(other); - }; + return other -> matchesMajorVersion(version) + .and(other2 -> version.getMinor() <= other2.getMinor()) + .test(other); } } diff --git a/transformation/src/main/java/org/opensearch/migrations/transformation/CanApplyResult.java b/transformation/src/main/java/org/opensearch/migrations/transformation/CanApplyResult.java index 8889046f1..a1cdac073 100644 --- a/transformation/src/main/java/org/opensearch/migrations/transformation/CanApplyResult.java +++ b/transformation/src/main/java/org/opensearch/migrations/transformation/CanApplyResult.java @@ -7,8 +7,9 @@ * The result after checking if a transformer can be applied to an entity */ public abstract class CanApplyResult { - public final static CanApplyResult YES = new Yes(); - public final static CanApplyResult NO = new No(); + private CanApplyResult() {} + public static final CanApplyResult YES = new Yes(); + public static final CanApplyResult NO = new No(); /** Yes, the transformation can be applied */ public static final class Yes extends CanApplyResult {} diff --git a/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java b/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java index 77aab8c67..336feec3c 100644 --- a/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java +++ b/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java @@ -11,7 +11,7 @@ /** * Supports transformation of the Index Mapping types that were changed from mutliple types to a single type between ES 6 to ES 7 - * + * * Example: * Starting state (ES 6): * { @@ -26,7 +26,7 @@ * } * ] * } - * + * * Ending state (ES 7): * { * "mappings": { @@ -39,30 +39,32 @@ */ public class IndexMappingTypeRemoval implements TransformationRule { + public static final String MAPPINGS_KEY = "mappings"; + 
@Override public CanApplyResult canApply(final Index index) { - final var mappingNode = index.rawJson().get("mappings"); + final var mappingNode = index.rawJson().get(MAPPINGS_KEY); if (mappingNode == null) { return CanApplyResult.NO; } - // Detect unsupported multiple type mappings, eg: - // { "mappings": [{ "foo": {...}}, { "bar": {...} }] } - // { "mappings": [{ "foo": {...}, "bar": {...} }] } - if (mappingNode.isArray()) { - if (mappingNode.size() > 1 || mappingNode.get(0).size() > 1) { - return new Unsupported("Multiple mapping types are not supported"); - } + + // Detect unsupported multiple type mappings: + // 1. {"mappings": [{ "foo": {...} }, { "bar": {...} }]}
+ // 2. {"mappings": [{ "foo": {...}, "bar": {...}  }]}
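+ // (ES 7 dropped multi-type indices; this rule does not attempt to merge several types into one.)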
+ if (mappingNode.isArray() && (mappingNode.size() > 1 || mappingNode.get(0).size() > 1)) { + return new Unsupported("Multiple mapping types are not supported"); + } } - // Detect if there is no intermediate type node - // { "mappings": { "_doc": { "properties": { } } } } + // Check for absence of intermediate type node + // 1. {"mappings": {"properties": {...} }}
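+ // (Typeless mappings are already in the ES 7 shape, so there is nothing for this rule to remove.)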
+ if (mappingNode.isObject() && mappingNode.get("properties") != null) { return CanApplyResult.NO; } - // There is a type under mappings, e.g. { "mappings": [{ "foo": {...} }] } + // There is a type under mappings + // 1. { "mappings": [{ "foo": {...} }] }
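+ // (This is the shape applyTransformation below flattens, hoisting the single type's fields directly under "mappings".)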
return CanApplyResult.YES; } @@ -72,7 +74,7 @@ public boolean applyTransformation(final Index index) { return false; } - final var mappingsNode = index.rawJson().get("mappings"); + final var mappingsNode = index.rawJson().get(MAPPINGS_KEY); // Handle array case if (mappingsNode.isArray()) { final var mappingsInnerNode = (ObjectNode) mappingsNode.get(0); @@ -82,7 +84,7 @@ public boolean applyTransformation(final Index index) { mappingsInnerNode.remove(typeName); typeNode.fields().forEachRemaining(node -> mappingsInnerNode.set(node.getKey(), node.getValue())); - index.rawJson().set("mappings", mappingsInnerNode); + index.rawJson().set(MAPPINGS_KEY, mappingsInnerNode); } if (mappingsNode.isObject()) { diff --git a/transformation/src/test/java/org/opensearch/migrations/VersionTest.java b/transformation/src/test/java/org/opensearch/migrations/VersionTest.java index 17b0150d7..62f936224 100644 --- a/transformation/src/test/java/org/opensearch/migrations/VersionTest.java +++ b/transformation/src/test/java/org/opensearch/migrations/VersionTest.java @@ -10,7 +10,7 @@ public class VersionTest { @Test void fromString() throws ParseException { - var expected = Version.builder().flavor(Flavor.OpenSearch).major(1).minor(3).patch(18).build(); + var expected = Version.builder().flavor(Flavor.OPENSEARCH).major(1).minor(3).patch(18).build(); assertThat(Version.fromString("OpenSearch 1.3.18"), equalTo(expected)); assertThat(Version.fromString("Opensearch 1.3.18"), equalTo(expected)); assertThat(Version.fromString("Opensearch 1.3.18"), equalTo(expected)); @@ -23,7 +23,7 @@ void fromString() throws ParseException { @Test void fromString_defaultPatch() throws ParseException { - var expected = Version.builder().flavor(Flavor.OpenSearch).major(1).minor(3).patch(0).build(); + var expected = Version.builder().flavor(Flavor.OPENSEARCH).major(1).minor(3).patch(0).build(); assertThat(Version.fromString("OpenSearch 1.3.0"), equalTo(expected)); assertThat(Version.fromString("OpenSearch 1.3.x"), equalTo(expected)); assertThat(Version.fromString("OpenSearch 1.3"), equalTo(expected)); @@ -31,7 +31,7 @@ void fromString_defaultPatch() throws ParseException { @Test void fromString_defaultMinor() throws ParseException { - var expected = Version.builder().flavor(Flavor.OpenSearch).major(1).minor(0).patch(0).build(); + var expected = Version.builder().flavor(Flavor.OPENSEARCH).major(1).minor(0).patch(0).build(); assertThat(Version.fromString("OpenSearch 1.0.0"), equalTo(expected)); assertThat(Version.fromString("OpenSearch 1.0"), equalTo(expected)); assertThat(Version.fromString("OpenSearch 1.x.x"), equalTo(expected)); From a902b14d7b3cec2be94e2b8552d456081549dc2e Mon Sep 17 00:00:00 2001 From: Greg Schohn Date: Wed, 18 Sep 2024 17:23:07 -0400 Subject: [PATCH 36/38] Make the default TLS protocol setup for the CaptureProxy use TLS1.2 & 1.3. When 'plugins.security.ssl.http.enabled: false', don't setup HTTPS. Add unit tests for parsing. 
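As a concrete illustration, a minimal capture proxy TLS config exercising the new key might look like the sketch below (assembled from the unit test added in this patch; the .pem file names are the test's placeholder values, not required names):

    plugins.security.ssl.http.enabled: true
    plugins.security.ssl.http.pemcert_filepath: esnode.pem
    plugins.security.ssl.http.pemkey_filepath: esnode-key.pem
    plugins.security.ssl.http.pemtrustedcas_filepath: root-ca.pem
    plugins.security.ssl.http.enabled_protocols: [ TLSv1.2, TLSv1.3 ]

When enabled_protocols is omitted, getSettings now defaults it to TLSv1.2 and TLSv1.3.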
Signed-off-by: Greg Schohn --- .../proxyserver/CaptureProxy.java | 17 +++++--- .../proxyserver/CaptureProxySetupTest.java | 41 +++++++++++++++++++ 2 files changed, 52 insertions(+), 6 deletions(-) diff --git a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java index 1a9e11a8a..4d4981246 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/main/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxy.java @@ -73,6 +73,7 @@ public class CaptureProxy { private static final String HTTPS_CONFIG_PREFIX = "plugins.security.ssl.http."; public static final String DEFAULT_KAFKA_CLIENT_ID = "HttpCaptureProxyProducer"; + public static final String SUPPORTED_TLS_PROTOCOLS_LIST_KEY = "plugins.security.ssl.http.enabled_protocols"; public static class Parameters { @Parameter(required = false, @@ -204,8 +205,14 @@ protected static Settings getSettings(@NonNull String configFile) { .entrySet().stream() .filter(kvp -> kvp.getKey().startsWith(HTTPS_CONFIG_PREFIX)) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + if (!httpsSettings.containsKey(SUPPORTED_TLS_PROTOCOLS_LIST_KEY)) { + httpsSettings.put(SUPPORTED_TLS_PROTOCOLS_LIST_KEY, List.of("TLSv1.2", "TLSv1.3")); + } return Settings.builder().loadFromMap(httpsSettings) + // Don't bother with configuring the 'transport' layer (port 9300), which the plugin that we're using + // would otherwise also configure (and fail on). We only use the plugin to set up security for the 'http' + // port and then move the SSLEngine into our implementation.
.put(SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED, false) .put("path.home", configParentDirStr) .build(); @@ -376,12 +383,10 @@ public static void main(String[] args) throws InterruptedException, IOException ); var sksOp = Optional.ofNullable(params.sslConfigFilePath) - .map( - sslConfigFile -> new DefaultSecurityKeyStore( - getSettings(sslConfigFile), - Paths.get(sslConfigFile).toAbsolutePath().getParent() - ) - ); + .map(sslConfigFile -> new DefaultSecurityKeyStore( + getSettings(sslConfigFile), + Paths.get(sslConfigFile).toAbsolutePath().getParent())) + .filter(sks -> sks.sslHTTPProvider != null); sksOp.ifPresent(DefaultSecurityKeyStore::initHttpSSLConfig); var proxy = new NettyScanningHttpProxy(params.frontsidePort); diff --git a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxySetupTest.java b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxySetupTest.java index c8ccb0d23..4e5ff226e 100644 --- a/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxySetupTest.java +++ b/TrafficCapture/trafficCaptureProxyServer/src/test/java/org/opensearch/migrations/trafficcapture/proxyserver/CaptureProxySetupTest.java @@ -1,6 +1,9 @@ package org.opensearch.migrations.trafficcapture.proxyserver; import java.io.IOException; +import java.nio.file.Files; +import java.util.List; +import java.util.Map; import java.util.Properties; import org.apache.kafka.clients.CommonClientConfigs; @@ -12,6 +15,7 @@ public class CaptureProxySetupTest { public final static String kafkaBrokerString = "invalid:9092"; + public static final String TLS_PROTOCOLS_KEY = "plugins.security.ssl.http.enabled_protocols"; @Test public void testBuildKafkaPropertiesBaseCase() throws IOException { @@ -111,4 +115,41 @@ public void testBuildKafkaPropertiesWithPropertyFile() throws IOException { // Settings needed for other passed arguments (i.e. 
--enableMSKAuth) are ignored by property file Assertions.assertEquals("SASL_SSL", props.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG)); } + + @Test + public void testTlsParametersAreProperlyRead() throws Exception { + for (var kvp : Map.of( + "[ TLSv1.3, TLSv1.2 ]", List.of("TLSv1.3","TLSv1.2"), + "[ TLSv1.2, TLSv1.3 ]", List.of("TLSv1.2","TLSv1.3"), + "\n - TLSv1.2\n - TLSv1.3", List.of("TLSv1.2","TLSv1.3"), + "\n - TLSv1.2", List.of("TLSv1.2")) + .entrySet()) + { + testTlsParametersAreProperlyRead(TLS_PROTOCOLS_KEY + ": " + kvp.getKey(), kvp.getValue()); + } + } + + @Test + public void testNoProtocolConfigDefaultsToSecureOnesOnly() throws Exception { + testTlsParametersAreProperlyRead("", List.of("TLSv1.2","TLSv1.3")); + } + + public void testTlsParametersAreProperlyRead(String protocolsBlockString, List<String> expectedList) + throws Exception + { + var tempFile = Files.createTempFile("captureProxy_tlsConfig", "yaml"); + try { + Files.writeString(tempFile, "plugins.security.ssl.http.enabled: true\n" + + "plugins.security.ssl.http.pemcert_filepath: esnode.pem\n" + + "plugins.security.ssl.http.pemkey_filepath: esnode-key.pem\n" + + "plugins.security.ssl.http.pemtrustedcas_filepath: root-ca.pem\n" + + protocolsBlockString); + + var settings = CaptureProxy.getSettings(tempFile.toAbsolutePath().toString()); + Assertions.assertEquals(String.join(", ", expectedList), + String.join(", ", settings.getAsList(TLS_PROTOCOLS_KEY))); + } finally { + Files.deleteIfExists(tempFile); + } + } } From 04a07bb3fe43b1707f5a3572a38dbc925ab8fc9e Mon Sep 17 00:00:00 2001 From: Peter Nied Date: Wed, 18 Sep 2024 18:13:08 -0500 Subject: [PATCH 37/38] Fix code smells in Metadata Cli (#972) * Fix code smell issues in Metadata Cli - Reduce access on fields and make final - Create indentation function to ensure consistent output, reduce duplicate logic - Improve code coverage on command line parsing code - Bonus fix bugs where some error messages were not being thrown Signed-off-by: Peter Nied * Add tests for the cli output of Items and Clusters Signed-off-by: Peter Nied --- MetadataMigration/build.gradle | 3 +- .../migrations/MetadataMigration.java | 18 +-- .../cli/ClusterReaderExtractor.java | 20 +++- .../opensearch/migrations/cli/Clusters.java | 10 +- .../org/opensearch/migrations/cli/Format.java | 15 +++ .../org/opensearch/migrations/cli/Items.java | 36 +++--- .../migrations/commands/Configure.java | 5 +- .../migrations/commands/ConfigureResult.java | 9 ++ .../migrations/commands/EvaluateResult.java | 21 ---- .../migrations/commands/MigrateResult.java | 21 ---- .../commands/MigrationItemResult.java | 22 ++++ .../migrations/commands/Result.java | 3 + .../migrations/MetadataMigrationTest.java | 53 +++++++++ .../cli/ClusterReaderExtractorTest.java | 105 ++++++++++++++++++ .../migrations/cli/ClustersTest.java | 55 +++++++++ .../opensearch/migrations/cli/ItemsTest.java | 77 +++++++++++++ .../commands/MigrationItemResultTest.java | 75 +++++++++++++ .../rfs/common/http/ConnectionContext.java | 4 +- testHelperFixtures/build.gradle | 1 + .../matchers/ContainsStringCount.java | 35 ++++++ .../migrations/matchers/HasLineCount.java | 34 ++++++ 21 files changed, 543 insertions(+), 79 deletions(-) create mode 100644 MetadataMigration/src/main/java/org/opensearch/migrations/cli/Format.java create mode 100644 MetadataMigration/src/test/java/org/opensearch/migrations/MetadataMigrationTest.java create mode 100644 MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClusterReaderExtractorTest.java create mode 100644
MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClustersTest.java create mode 100644 MetadataMigration/src/test/java/org/opensearch/migrations/cli/ItemsTest.java create mode 100644 MetadataMigration/src/test/java/org/opensearch/migrations/commands/MigrationItemResultTest.java create mode 100644 testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/ContainsStringCount.java create mode 100644 testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/HasLineCount.java diff --git a/MetadataMigration/build.gradle b/MetadataMigration/build.gradle index 155ac1f56..7a80f19d3 100644 --- a/MetadataMigration/build.gradle +++ b/MetadataMigration/build.gradle @@ -18,7 +18,8 @@ dependencies { implementation group: 'org.slf4j', name: 'slf4j-api' implementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl' - testImplementation testFixtures(project(path: ':RFS')) + testImplementation testFixtures(project(':RFS')) + testImplementation testFixtures(project(':testHelperFixtures')) testImplementation group: 'org.apache.logging.log4j', name: 'log4j-core' testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j2-impl' testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter-api' diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/MetadataMigration.java b/MetadataMigration/src/main/java/org/opensearch/migrations/MetadataMigration.java index ad9bc9128..7823e1e04 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/MetadataMigration.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/MetadataMigration.java @@ -77,7 +77,7 @@ public static void main(String[] args) throws Exception { result = meta.evaluate(evaluateArgs).execute(context); break; } - log.info(result.toString()); + log.atInfo().setMessage("{}").addArgument(result::asCliOutput).log(); System.exit(result.getExitCode()); } @@ -94,18 +94,20 @@ public Migrate migrate(MigrateOrEvaluateArgs arguments) { } private static void printTopLevelHelp(JCommander commander) { - log.info("Usage: [options] [command] [commandOptions]"); - log.info("Options:"); + var sb = new StringBuilder(); + sb.append("Usage: [options] [command] [commandOptions]"); + sb.append("Options:"); for (var parameter : commander.getParameters()) { - log.info(" " + parameter.getNames()); - log.info(" " + parameter.getDescription()); + sb.append(" " + parameter.getNames()); + sb.append(" " + parameter.getDescription()); } - log.info("Commands:"); + sb.append("Commands:"); for (var command : commander.getCommands().entrySet()) { - log.info(" " + command.getKey()); + sb.append(" " + command.getKey()); } - log.info("\nUse --help with a specific command for more information."); + sb.append("\nUse --help with a specific command for more information."); + log.info(sb.toString()); } private static void printCommandUsage(JCommander jCommander) { diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/ClusterReaderExtractor.java b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/ClusterReaderExtractor.java index 59a3f00fd..00ef9a4ce 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/ClusterReaderExtractor.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/ClusterReaderExtractor.java @@ -3,6 +3,7 @@ import java.nio.file.Path; import org.opensearch.migrations.MigrateOrEvaluateArgs; +import org.opensearch.migrations.Version; import 
org.opensearch.migrations.cluster.ClusterProviderRegistry; import org.opensearch.migrations.cluster.ClusterReader; @@ -11,14 +12,15 @@ import com.rfs.common.S3Repo; import com.rfs.common.S3Uri; import com.rfs.common.SourceRepo; +import com.rfs.common.http.ConnectionContext; import lombok.AllArgsConstructor; @AllArgsConstructor public class ClusterReaderExtractor { - final MigrateOrEvaluateArgs arguments; + private final MigrateOrEvaluateArgs arguments; - public ClusterReader extractClusterReader() { - if (arguments.fileSystemRepoPath != null && arguments.s3RepoUri != null && arguments.sourceArgs.host != null) { + public ClusterReader extractClusterReader() { + if (arguments.fileSystemRepoPath == null && arguments.s3RepoUri == null && arguments.sourceArgs.host == null) { throw new ParameterException("No details on the source cluster found, please supply a connection details or a snapshot"); } if ((arguments.s3RepoUri != null) && (arguments.s3Region == null || arguments.s3LocalDirPath == null)) { @@ -26,7 +28,7 @@ public ClusterReader extractClusterReader() { } if (arguments.sourceArgs != null && arguments.sourceArgs.host != null) { - return ClusterProviderRegistry.getRemoteReader(arguments.sourceArgs.toConnectionContext()); + return getRemoteReader(arguments.sourceArgs.toConnectionContext()); } SourceRepo repo = null; @@ -38,6 +40,14 @@ public ClusterReader extractClusterReader() { throw new ParameterException("Unable to find valid resource provider"); } - return ClusterProviderRegistry.getSnapshotReader(arguments.sourceVersion, repo); + return getSnapshotReader(arguments.sourceVersion, repo); + } + + ClusterReader getRemoteReader(ConnectionContext connection) { + return ClusterProviderRegistry.getRemoteReader(connection); + } + + ClusterReader getSnapshotReader(Version sourceVersion, SourceRepo repo) { + return ClusterProviderRegistry.getSnapshotReader(sourceVersion, repo); } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Clusters.java b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Clusters.java index b967c0775..d29812cd6 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Clusters.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Clusters.java @@ -14,17 +14,17 @@ public class Clusters { private ClusterReader source; private ClusterWriter target; - public String toString() { + public String asCliOutput() { var sb = new StringBuilder(); sb.append("Clusters:" + System.lineSeparator()); if (getSource() != null) { - sb.append(" Source:" + System.lineSeparator()); - sb.append(" " + getSource() + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Source:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) + getSource() + System.lineSeparator()); sb.append(System.lineSeparator()); } if (getTarget() != null) { - sb.append(" Target:" + System.lineSeparator()); - sb.append(" " + getTarget() + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Target:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) + getTarget() + System.lineSeparator()); sb.append(System.lineSeparator()); } return sb.toString(); diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Format.java b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Format.java new file mode 100644 index 000000000..b4eb989a6 --- /dev/null +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Format.java @@ -0,0 +1,15 @@ +package org.opensearch.migrations.cli; + 
+import lombok.experimental.UtilityClass; + +/** Shared formatting for command line interface components */ +@UtilityClass +public class Format { + + private static final String INDENT = " "; + + /** Indents to a given level for printing to the console */ + public static String indentToLevel(final int level) { + return INDENT.repeat(level); + } +} diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Items.java b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Items.java index 3d073f772..b2a12c459 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Items.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/cli/Items.java @@ -5,6 +5,7 @@ import lombok.Builder; import lombok.Data; +import lombok.NonNull; /** * Either items that are candidates for migration or have been migrated; @@ -12,37 +13,42 @@ @Builder @Data public class Items { - public boolean dryRun; - public List indexTemplates; - public List componentTemplates; - public List indexes; - public List aliases; + static final String NONE_FOUND_MARKER = ""; + private final boolean dryRun; + @NonNull + private final List indexTemplates; + @NonNull + private final List componentTemplates; + @NonNull + private final List indexes; + @NonNull + private final List aliases; - public String toString() { + public String asCliOutput() { var sb = new StringBuilder(); if (isDryRun()) { sb.append("Migration Candidates:" + System.lineSeparator()); } else { sb.append("Migrated Items:" + System.lineSeparator()); } - sb.append(" Index Templates:" + System.lineSeparator()); - sb.append(" " + getPrintableList(getIndexTemplates()) + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Index Templates:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) + getPrintableList(getIndexTemplates()) + System.lineSeparator()); sb.append(System.lineSeparator()); - sb.append(" Component Templates:" + System.lineSeparator()); - sb.append(" " + getPrintableList(getComponentTemplates()) + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Component Templates:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) +getPrintableList(getComponentTemplates()) + System.lineSeparator()); sb.append(System.lineSeparator()); - sb.append(" Indexes:" + System.lineSeparator()); - sb.append(" " + getPrintableList(getIndexes()) + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Indexes:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) + getPrintableList(getIndexes()) + System.lineSeparator()); sb.append(System.lineSeparator()); - sb.append(" Aliases:" + System.lineSeparator()); - sb.append(" " + getPrintableList(getAliases()) + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + "Aliases:" + System.lineSeparator()); + sb.append(Format.indentToLevel(2) +getPrintableList(getAliases()) + System.lineSeparator()); sb.append(System.lineSeparator()); return sb.toString(); } private String getPrintableList(List list) { if (list == null || list.isEmpty()) { - return ""; + return NONE_FOUND_MARKER; } return list.stream().sorted().collect(Collectors.joining(", ")); } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Configure.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Configure.java index 9aef0d9d9..b958c864e 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Configure.java +++ 
b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Configure.java @@ -6,7 +6,8 @@ public class Configure { public ConfigureResult execute() { - log.atError().setMessage("configure is not supported").log(); - return new ConfigureResult(9999); + var message = "configure is not supported"; + log.atError().setMessage(message).log(); + return new ConfigureResult(9999, message); } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/ConfigureResult.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/ConfigureResult.java index dac296884..294f4bb9a 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/ConfigureResult.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/ConfigureResult.java @@ -2,9 +2,18 @@ import lombok.AllArgsConstructor; import lombok.Getter; +import lombok.ToString; @AllArgsConstructor +@ToString public class ConfigureResult implements Result { @Getter private final int exitCode; + + @Getter + private final String errorMessage; + + public String asCliOutput() { + return this.toString(); + } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/EvaluateResult.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/EvaluateResult.java index 06f67e602..8f4273e06 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/EvaluateResult.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/EvaluateResult.java @@ -1,7 +1,5 @@ package org.opensearch.migrations.commands; -import org.apache.logging.log4j.util.Strings; - import org.opensearch.migrations.cli.Clusters; import org.opensearch.migrations.cli.Items; @@ -15,23 +13,4 @@ public class EvaluateResult implements MigrationItemResult { private final Items items; private final String errorMessage; private final int exitCode; - - public String toString() { - var sb = new StringBuilder(); - if (getClusters() != null) { - sb.append(getClusters() + System.lineSeparator()); - } - if (getItems() != null) { - sb.append(getItems() + System.lineSeparator()); - } - sb.append("Results:" + System.lineSeparator()); - if (Strings.isNotBlank(getErrorMessage())) { - sb.append(" Issue(s) detected" + System.lineSeparator()); - sb.append("Issues:" + System.lineSeparator()); - sb.append(" " + getErrorMessage() + System.lineSeparator()); - } else { - sb.append(" " + getExitCode() + " issue(s) detected" + System.lineSeparator()); - } - return sb.toString(); - } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrateResult.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrateResult.java index 385697dfb..51b2ba0b2 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrateResult.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrateResult.java @@ -1,7 +1,5 @@ package org.opensearch.migrations.commands; -import org.apache.logging.log4j.util.Strings; - import org.opensearch.migrations.cli.Clusters; import org.opensearch.migrations.cli.Items; @@ -15,23 +13,4 @@ public class MigrateResult implements MigrationItemResult { private final Items items; private final String errorMessage; private final int exitCode; - - public String toString() { - var sb = new StringBuilder(); - if (getClusters() != null) { - sb.append(getClusters() + System.lineSeparator()); - } - if (getItems() != null) { - sb.append(getItems() + System.lineSeparator()); - } 
- sb.append("Results:" + System.lineSeparator()); - if (Strings.isNotBlank(getErrorMessage())) { - sb.append(" Issue(s) detected" + System.lineSeparator()); - sb.append("Issues:" + System.lineSeparator()); - sb.append(" " + getErrorMessage() + System.lineSeparator()); - } else { - sb.append(" " + getExitCode() + " issue(s) detected" + System.lineSeparator()); - } - return sb.toString(); - } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrationItemResult.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrationItemResult.java index d8c3d342c..67396f11a 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrationItemResult.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/MigrationItemResult.java @@ -1,10 +1,32 @@ package org.opensearch.migrations.commands; +import org.apache.logging.log4j.util.Strings; + import org.opensearch.migrations.cli.Clusters; +import org.opensearch.migrations.cli.Format; import org.opensearch.migrations.cli.Items; /** All shared cli result information */ public interface MigrationItemResult extends Result { Clusters getClusters(); Items getItems(); + + default String asCliOutput() { + var sb = new StringBuilder(); + if (getClusters() != null) { + sb.append(getClusters().asCliOutput() + System.lineSeparator()); + } + if (getItems() != null) { + sb.append(getItems().asCliOutput() + System.lineSeparator()); + } + sb.append("Results:" + System.lineSeparator()); + if (Strings.isNotBlank(getErrorMessage())) { + sb.append(Format.indentToLevel(1) + "Issue(s) detected" + System.lineSeparator()); + sb.append("Issues:" + System.lineSeparator()); + sb.append(Format.indentToLevel(1) + getErrorMessage() + System.lineSeparator()); + } else { + sb.append(Format.indentToLevel(1) + getExitCode() + " issue(s) detected" + System.lineSeparator()); + } + return sb.toString(); + } } diff --git a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Result.java b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Result.java index 552d706fb..95cc78016 100644 --- a/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Result.java +++ b/MetadataMigration/src/main/java/org/opensearch/migrations/commands/Result.java @@ -3,4 +3,7 @@ /** All shared cli result information */ public interface Result { int getExitCode(); + String getErrorMessage(); + /** Render this result as a string for displaying on the command line */ + String asCliOutput(); } diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/MetadataMigrationTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/MetadataMigrationTest.java new file mode 100644 index 000000000..37b59b551 --- /dev/null +++ b/MetadataMigration/src/test/java/org/opensearch/migrations/MetadataMigrationTest.java @@ -0,0 +1,53 @@ +package org.opensearch.migrations; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import org.opensearch.migrations.testutils.CloseableLogSetup; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; + +public class MetadataMigrationTest { + + @Test + void testMain_expectTopLevelHelp() throws Exception { + var testCases = List.of( + new String[]{}, + new String[]{"-h"}, + new String[]{"--help"} + ); + for (var testCase : testCases) { + try (var closeableLogSetup = new CloseableLogSetup(MetadataMigration.class.getName())) { + 
MetadataMigration.main(testCase); + + var logEvents = closeableLogSetup.getLogEvents(); + + assertThat(logEvents, hasSize(2)); + assertThat(logEvents.get(0), containsString("Command line arguments")); + assertThat(logEvents.get(1), containsString("Usage: [options] [command] [commandOptions]")); + } + } + } + + @Test + void testMain_expectCommandHelp() throws Exception { + var testCases = List.of( + new String[]{"evaluate", "-h"}, + new String[]{"migrate", "--help"} + ); + for (var testCase : testCases) { + try (var closeableLogSetup = new CloseableLogSetup(MetadataMigration.class.getName())) { + MetadataMigration.main(testCase); + + var logEvents = closeableLogSetup.getLogEvents(); + + assertThat(logEvents, hasSize(2)); + assertThat(logEvents.get(0), containsString("Command line arguments")); + assertThat(logEvents.get(1), containsString("Usage: " + testCase[0] + " [options]")); + } + } + } +} diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClusterReaderExtractorTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClusterReaderExtractorTest.java new file mode 100644 index 000000000..dd6325aa5 --- /dev/null +++ b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClusterReaderExtractorTest.java @@ -0,0 +1,105 @@ +package org.opensearch.migrations.cli; + +import org.junit.jupiter.api.Test; + +import org.opensearch.migrations.MigrateOrEvaluateArgs; +import org.opensearch.migrations.Version; +import org.opensearch.migrations.cluster.ClusterReader; + +import com.beust.jcommander.ParameterException; +import com.rfs.common.FileSystemRepo; +import com.rfs.common.S3Repo; +import com.rfs.common.http.ConnectionContext; +import org.mockito.ArgumentCaptor; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; + + +public class ClusterReaderExtractorTest { + @Test + void testExtractClusterReader_noSnapshotOrRemote() { + var args = new MigrateOrEvaluateArgs(); + var extractor = new ClusterReaderExtractor(args); + + var exception = assertThrows(ParameterException.class, () -> extractor.extractClusterReader()); + assertThat(args.toString(), exception.getMessage(), equalTo("No details on the source cluster found, please supply a connection details or a snapshot")); + } + + @Test + void testExtractClusterReader_invalidS3Snapshot_missingRegion() { + var args = new MigrateOrEvaluateArgs(); + args.s3RepoUri = "foo.bar"; + args.s3LocalDirPath = "fizz.buzz"; + var extractor = new ClusterReaderExtractor(args); + + var exception = assertThrows(ParameterException.class, () -> extractor.extractClusterReader()); + assertThat(exception.getMessage(), equalTo("If an s3 repo is being used, s3-region and s3-local-dir-path must be set")); + } + + @Test + void testExtractClusterReader_invalidS3Snapshot_missingLocalDirPath() { + var args = new MigrateOrEvaluateArgs(); + args.s3RepoUri = "foo.bar"; + args.s3Region = "us-west-1"; + var extractor = new ClusterReaderExtractor(args); + + var exception = assertThrows(ParameterException.class, () -> extractor.extractClusterReader()); + assertThat(exception.getMessage(), equalTo("If an s3 repo is being used, s3-region and s3-local-dir-path must be 
set")); + } + + @Test + void testExtractClusterReader_validLocalSnapshot() { + var args = new MigrateOrEvaluateArgs(); + args.fileSystemRepoPath = "foo.bar"; + args.sourceVersion = Version.fromString("OS 1.1.1"); + var extractor = spy(new ClusterReaderExtractor(args)); + var mockReader = mock(ClusterReader.class); + doReturn(mockReader).when(extractor).getSnapshotReader(eq(args.sourceVersion), any(FileSystemRepo.class)); + + var result = extractor.extractClusterReader(); + assertThat(result, equalTo(mockReader)); + + verify(extractor).getSnapshotReader(eq(args.sourceVersion), any(FileSystemRepo.class)); + } + + @Test + void testExtractClusterReader_validS3Snapshot() { + var args = new MigrateOrEvaluateArgs(); + args.s3RepoUri = "foo.bar"; + args.s3Region = "us-west-1"; + args.s3LocalDirPath = "fizz.buzz"; + args.sourceVersion = Version.fromString("OS 9.9.9"); + var extractor = spy(new ClusterReaderExtractor(args)); + var mockReader = mock(ClusterReader.class); + doReturn(mockReader).when(extractor).getSnapshotReader(eq(args.sourceVersion), any(S3Repo.class)); + + var result = extractor.extractClusterReader(); + assertThat(result, equalTo(mockReader)); + + verify(extractor).getSnapshotReader(eq(args.sourceVersion), any(S3Repo.class)); + } + + @Test + void testExtractClusterReader_validRemote() { + var args = new MigrateOrEvaluateArgs(); + args.sourceArgs.host = "http://foo.bar"; + var extractor = spy(new ClusterReaderExtractor(args)); + var mockReader = mock(ClusterReader.class); + doReturn(mockReader).when(extractor).getRemoteReader(any()); + + var result = extractor.extractClusterReader(); + assertThat(result, equalTo(mockReader)); + + var foundContext = ArgumentCaptor.forClass(ConnectionContext.class); + verify(extractor).getRemoteReader(foundContext.capture()); + assertThat(args.sourceArgs.toConnectionContext(), equalTo(foundContext.getValue())); + } +} diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClustersTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClustersTest.java new file mode 100644 index 000000000..6a037bf48 --- /dev/null +++ b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ClustersTest.java @@ -0,0 +1,55 @@ +package org.opensearch.migrations.cli; + +import org.junit.jupiter.api.Test; + +import org.opensearch.migrations.cluster.ClusterReader; +import org.opensearch.migrations.cluster.ClusterWriter; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.migrations.matchers.HasLineCount.hasLineCount; +import static org.mockito.Mockito.mock; + +public class ClustersTest { + @Test + void testAsString_empty() { + var clusters = Clusters.builder().build(); + + var result = clusters.asCliOutput(); + + assertThat(result, containsString("Clusters:")); + assertThat(result, not(containsString("Source:"))); + assertThat(result, not(containsString("Target:"))); + assertThat(result, hasLineCount(1)); + } + + @Test + void testAsString_withSource() { + var clusters = Clusters.builder() + .source(mock(ClusterReader.class)) + .build(); + + var result = clusters.asCliOutput(); + + assertThat(result, containsString("Clusters:")); + assertThat(result, containsString("Source:")); + assertThat(result, not(containsString("Target:"))); + assertThat(result, hasLineCount(3)); + } + + @Test + void testAsString_withSourceAndTarget() { + var clusters = Clusters.builder() + 
.source(mock(ClusterReader.class)) + .target(mock(ClusterWriter.class)) + .build(); + + var result = clusters.asCliOutput(); + + assertThat(result, containsString("Clusters:")); + assertThat(result, containsString("Source:")); + assertThat(result, containsString("Target:")); + assertThat(result, hasLineCount(6)); + } +} diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ItemsTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ItemsTest.java new file mode 100644 index 000000000..6a6f33930 --- /dev/null +++ b/MetadataMigration/src/test/java/org/opensearch/migrations/cli/ItemsTest.java @@ -0,0 +1,77 @@ +package org.opensearch.migrations.cli; + +import java.util.List; + +import org.junit.jupiter.api.Test; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.migrations.matchers.ContainsStringCount.containsStringCount; +import static org.opensearch.migrations.matchers.HasLineCount.hasLineCount; + +public class ItemsTest { + @Test + void testAsString_empty() { + var items = Items.builder() + .indexTemplates(List.of()) + .componentTemplates(List.of()) + .indexes(List.of()) + .aliases(List.of()) + .build(); + + var result = items.asCliOutput(); + + assertThat(result, containsString("Migrated Items:")); + assertThat(result, containsString("Index Templates:")); + assertThat(result, containsString("Component Templates:")); + assertThat(result, containsString("Indexes:")); + assertThat(result, containsString("Aliases:")); + assertThat(result, containsStringCount(Items.NONE_FOUND_MARKER, 4)); + assertThat(result, hasLineCount(12)); + } + + @Test + void testAsString_full() { + var items = Items.builder() + .indexTemplates(List.of("it1", "it2")) + .componentTemplates(List.of("ct1", "ct2")) + .indexes(List.of("i1", "i2")) + .aliases(List.of("a1", "a2")) + .build(); + + var result = items.asCliOutput(); + + assertThat(result, containsString("Migrated Items:")); + assertThat(result, containsString("Index Templates:")); + assertThat(result, containsString("it1, it2")); + assertThat(result, containsString("Component Templates:")); + assertThat(result, containsString("ct1, ct2")); + assertThat(result, containsString("Indexes:")); + assertThat(result, containsString("i1, i2")); + assertThat(result, containsString("Aliases:")); + assertThat(result, containsString("a1, a2")); + assertThat(result, containsStringCount(Items.NONE_FOUND_MARKER, 0)); + assertThat(result, hasLineCount(12)); + } + + @Test + void testAsString_itemOrdering() { + var items = Items.builder() + .indexTemplates(List.of()) + .componentTemplates(List.of()) + .indexes(List.of("i1", "i2", "i5", "i3", "i4")) + .aliases(List.of()) + .build(); + + var result = items.asCliOutput(); + + assertThat(result, containsString("Migrated Items:")); + assertThat(result, containsString("Index Templates:")); + assertThat(result, containsString("i1, i2, i3, i4, i5")); + assertThat(result, containsString("Component Templates:")); + assertThat(result, containsString("Indexes:")); + assertThat(result, containsString("Aliases:")); + assertThat(result, containsStringCount(Items.NONE_FOUND_MARKER, 3)); + assertThat(result, hasLineCount(12)); + } +} diff --git a/MetadataMigration/src/test/java/org/opensearch/migrations/commands/MigrationItemResultTest.java b/MetadataMigration/src/test/java/org/opensearch/migrations/commands/MigrationItemResultTest.java new file mode 100644 index 000000000..f5e745e42 --- /dev/null +++ 
b/MetadataMigration/src/test/java/org/opensearch/migrations/commands/MigrationItemResultTest.java @@ -0,0 +1,75 @@ +package org.opensearch.migrations.commands; + +import org.junit.jupiter.api.Test; + +import org.opensearch.migrations.cli.Clusters; +import org.opensearch.migrations.cli.Items; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +public class MigrationItemResultTest { + @Test + void testAsString_fullResults_withMessage() { + var clusters = mock(Clusters.class); + var items = mock(Items.class); + var testObject = EvaluateResult.builder() + .clusters(clusters) + .items(items) + .exitCode(10) + .errorMessage("Full results") + .build(); + + var result = testObject.asCliOutput(); + assertThat(result, containsString("Issue(s) detected")); + assertThat(result, containsString("Issues:")); + + verify(clusters).asCliOutput(); + verify(items).asCliOutput(); + verifyNoMoreInteractions(items, clusters); + } + + @Test + void testAsString_fullResults_withNoMessage() { + var clusters = mock(Clusters.class); + var items = mock(Items.class); + var testObject = EvaluateResult.builder() + .clusters(clusters) + .items(items) + .exitCode(10) + .build(); + + var result = testObject.asCliOutput(); + assertThat(result, containsString("10 issue(s) detected")); + verify(clusters).asCliOutput(); + verify(items).asCliOutput(); + verifyNoMoreInteractions(items, clusters); + } + + @Test + void testAsString_noItems() { + var clusters = mock(Clusters.class); + var testObject = EvaluateResult.builder() + .clusters(clusters) + .exitCode(0) + .build(); + + var result = testObject.asCliOutput(); + assertThat(result, containsString("0 issue(s) detected")); + verify(clusters).asCliOutput(); + verifyNoMoreInteractions(clusters); + } + + @Test + void testAsString_nothing() { + var testObject = EvaluateResult.builder() + .exitCode(0) + .build(); + + var result = testObject.asCliOutput(); + assertThat(result, containsString("0 issue(s) detected")); + } +} diff --git a/RFS/src/main/java/com/rfs/common/http/ConnectionContext.java b/RFS/src/main/java/com/rfs/common/http/ConnectionContext.java index f28fb1cf2..05eafe29a 100644 --- a/RFS/src/main/java/com/rfs/common/http/ConnectionContext.java +++ b/RFS/src/main/java/com/rfs/common/http/ConnectionContext.java @@ -6,6 +6,7 @@ import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; @@ -14,6 +15,7 @@ * Stores the connection context for an Elasticsearch/OpenSearch cluster */ @Getter +@EqualsAndHashCode(exclude={"requestTransformer"}) @ToString(exclude={"requestTransformer"}) public class ConnectionContext { public enum Protocol { @@ -140,7 +142,7 @@ public static class TargetAdvancedArgs { public boolean compressionEnabled = false; } - @Getter + @Getter public static class SourceArgs implements IParams { @Parameter(names = { "--source-host" }, description = "The source host and port (e.g. 
http://localhost:9200)", required = false)
diff --git a/testHelperFixtures/build.gradle b/testHelperFixtures/build.gradle
index 70b0fd822..e5b3ab7ed 100644
--- a/testHelperFixtures/build.gradle
+++ b/testHelperFixtures/build.gradle
@@ -35,6 +35,7 @@ dependencies {
     testFixturesImplementation group: 'org.bouncycastle', name: 'bcprov-jdk18on'
     testFixturesImplementation group: 'org.bouncycastle', name: 'bcpkix-jdk18on'
     testFixturesImplementation group: 'org.junit.jupiter', name: 'junit-jupiter-api'
+    testFixturesImplementation group: 'org.hamcrest', name: 'hamcrest'
     testFixturesImplementation group: 'org.slf4j', name: 'slf4j-api'
     testFixturesApi group: 'org.testcontainers', name: 'testcontainers'
     testFixturesApi group: 'org.testcontainers', name: 'toxiproxy'
diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/ContainsStringCount.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/ContainsStringCount.java
new file mode 100644
index 000000000..0cec5c714
--- /dev/null
+++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/ContainsStringCount.java
@@ -0,0 +1,38 @@
+package org.opensearch.migrations.matchers;
+
+import java.util.regex.Pattern;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
+
+import lombok.AllArgsConstructor;
+
+@AllArgsConstructor
+public class ContainsStringCount extends TypeSafeMatcher<String> {
+    private final String expectedString;
+    private final int expectedCount;
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("a string containing '" + expectedString + "' " + expectedCount + " times");
+    }
+
+    @Override
+    protected void describeMismatchSafely(String item, Description mismatchDescription) {
+        mismatchDescription.appendText("was found " + containsStringCount(item) + " times");
+    }
+
+    @Override
+    protected boolean matchesSafely(String item) {
+        return containsStringCount(item) == expectedCount;
+    }
+
+    private int containsStringCount(String item) {
+        // Quote the expected string so it is counted literally rather than interpreted as a regex
+        return item == null ? 0 : item.split(Pattern.quote(expectedString), -1).length - 1;
+    }
+
+    public static ContainsStringCount containsStringCount(String s, int n) {
+        return new ContainsStringCount(s, n);
+    }
+}
diff --git a/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/HasLineCount.java b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/HasLineCount.java
new file mode 100644
index 000000000..09203d8b1
--- /dev/null
+++ b/testHelperFixtures/src/testFixtures/java/org/opensearch/migrations/matchers/HasLineCount.java
@@ -0,0 +1,36 @@
+package org.opensearch.migrations.matchers;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeMatcher;
+
+import lombok.AllArgsConstructor;
+
+@AllArgsConstructor
+public class HasLineCount extends TypeSafeMatcher<String> {
+    private int expectedLineCount;
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("a string with " + expectedLineCount + " lines");
+    }
+
+    @Override
+    protected void describeMismatchSafely(String item, Description mismatchDescription) {
+        // Report the same count that matchesSafely checks, so the mismatch message cannot
+        // disagree with the match logic on platforms where lineSeparator() is not "\n"
+        mismatchDescription.appendText("was a string with " + newlineCount(item) + " lines");
+    }
+
+    @Override
+    protected boolean matchesSafely(String item) {
+        return newlineCount(item) == expectedLineCount;
+    }
+
+    private int newlineCount(String item) {
+        return item == null ? 
0 : item.split("\n").length; + } + + public static HasLineCount hasLineCount(int n) { + return new HasLineCount(n); + } +} From b277fec58a73856a9297dcb6cd221e4af6a8f6d1 Mon Sep 17 00:00:00 2001 From: Chris Helma <25470211+chelma@users.noreply.github.com> Date: Wed, 18 Sep 2024 19:41:59 -0500 Subject: [PATCH 38/38] SonarQube RFS Versioned file fixes (#969) * SonarQube RFS Versioned file fixes Signed-off-by: Chris Helma * Cleaned up Versioned RFS classes Signed-off-by: Chris Helma * Spotless Signed-off-by: Chris Helma --------- Signed-off-by: Chris Helma Signed-off-by: Chris Helma <25470211+chelma@users.noreply.github.com> --- .../Transformer_ES_6_8_to_OS_2_11.java | 8 +- .../Transformer_ES_7_10_OS_2_11.java | 4 +- .../ElasticsearchConstants_ES_6_8.java | 17 +- .../IndexMetadataData_ES_6_8.java | 39 ++--- .../ShardMetadataData_ES_6_8.java | 154 ++++-------------- .../SnapshotMetadataData_ES_6_8.java | 63 +------ .../SnapshotMetadataFactory_ES_6_8.java | 3 +- .../SnapshotRepoData_ES_6_8.java | 63 +++---- .../SnapshotRepoProvider_ES_6_8.java | 21 ++- .../ElasticsearchConstants_ES_7_10.java | 17 +- .../IndexMetadataData_ES_7_10.java | 39 ++--- .../ShardMetadataData_ES_7_10.java | 154 ++++-------------- .../SnapshotMetadataData_ES_7_10.java | 63 +------ .../SnapshotMetadataFactory_ES_7_10.java | 3 +- .../SnapshotRepoData_ES_7_10.java | 74 ++++----- .../SnapshotRepoProvider_ES_7_10.java | 31 ++-- .../version_os_2_11/IndexCreator_OS_2_11.java | 2 +- .../IndexMetadataData_OS_2_11.java | 2 +- .../RemoteIndexMetadata.java | 2 +- .../SnapshotRepoData_ES_7_10Test.java | 8 +- .../transformation/entity/Entity.java | 2 +- .../rules/IndexMappingTypeRemoval.java | 6 +- .../rules/IndexMappingTypeRemovalTest.java | 4 +- 23 files changed, 224 insertions(+), 555 deletions(-) diff --git a/RFS/src/main/java/com/rfs/transformers/Transformer_ES_6_8_to_OS_2_11.java b/RFS/src/main/java/com/rfs/transformers/Transformer_ES_6_8_to_OS_2_11.java index 9c0af53bf..6810592e5 100644 --- a/RFS/src/main/java/com/rfs/transformers/Transformer_ES_6_8_to_OS_2_11.java +++ b/RFS/src/main/java/com/rfs/transformers/Transformer_ES_6_8_to_OS_2_11.java @@ -41,7 +41,7 @@ public GlobalMetadata transformGlobalMetadata(GlobalMetadata globalData) { var templateCopy = (ObjectNode) template.getValue().deepCopy(); var indexTemplate = (Index) () -> templateCopy; transformIndex(indexTemplate, IndexType.Template); - templates.set(template.getKey(), indexTemplate.rawJson()); + templates.set(template.getKey(), indexTemplate.getRawJson()); }); newRoot.set("templates", templates); } @@ -65,12 +65,12 @@ public GlobalMetadata transformGlobalMetadata(GlobalMetadata globalData) { public IndexMetadata transformIndexMetadata(IndexMetadata index) { var copy = index.deepCopy(); transformIndex(copy, IndexType.Concrete); - return new IndexMetadataData_OS_2_11(copy.rawJson(), copy.getId(), copy.getName()); + return new IndexMetadataData_OS_2_11(copy.getRawJson(), copy.getId(), copy.getName()); } private void transformIndex(Index index, IndexType type) { - logger.debug("Original Object: " + index.rawJson().toString()); - var newRoot = index.rawJson(); + logger.debug("Original Object: " + index.getRawJson().toString()); + var newRoot = index.getRawJson(); switch (type) { case Concrete: diff --git a/RFS/src/main/java/com/rfs/transformers/Transformer_ES_7_10_OS_2_11.java b/RFS/src/main/java/com/rfs/transformers/Transformer_ES_7_10_OS_2_11.java index 2809c363b..31e268c70 100644 --- a/RFS/src/main/java/com/rfs/transformers/Transformer_ES_7_10_OS_2_11.java +++ 
b/RFS/src/main/java/com/rfs/transformers/Transformer_ES_7_10_OS_2_11.java @@ -86,9 +86,9 @@ public GlobalMetadata transformGlobalMetadata(GlobalMetadata metaData) { @Override public IndexMetadata transformIndexMetadata(IndexMetadata indexData) { - logger.debug("Original Object: " + indexData.rawJson().toString()); + logger.debug("Original Object: " + indexData.getRawJson().toString()); var copy = indexData.deepCopy(); - var newRoot = copy.rawJson(); + var newRoot = copy.getRawJson(); TransformFunctions.removeIntermediateMappingsLevels(newRoot); diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/ElasticsearchConstants_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/ElasticsearchConstants_ES_6_8.java index a74e69f22..98856c6c5 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/ElasticsearchConstants_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/ElasticsearchConstants_ES_6_8.java @@ -1,11 +1,15 @@ package com.rfs.version_es_6_8; +import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; + public class ElasticsearchConstants_ES_6_8 { + private ElasticsearchConstants_ES_6_8() {} + public static final int BUFFER_SIZE_IN_BYTES; public static final SmileFactory SMILE_FACTORY; public static final String SOFT_DELETES_FIELD; @@ -18,12 +22,13 @@ public class ElasticsearchConstants_ES_6_8 { // Taken from: // https://github.com/elastic/elasticsearch/blob/6.8/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java#L55 - SmileFactory smileFactory = new SmileFactory(); - smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); - smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); - smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); - smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, false); - SMILE_FACTORY = smileFactory; + SMILE_FACTORY = SmileFactory.builder() + .configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false) + .configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false) + .build(); + + SMILE_FACTORY.disable(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT); + SMILE_FACTORY.disable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION); // Soft Deletes were added in 7.0 SOFT_DELETES_FIELD = ""; diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/IndexMetadataData_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/IndexMetadataData_ES_6_8.java index 5e3264283..45ee64f82 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/IndexMetadataData_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/IndexMetadataData_ES_6_8.java @@ -5,30 +5,29 @@ import com.rfs.models.IndexMetadata; import com.rfs.transformers.TransformFunctions; +import lombok.Getter; public class IndexMetadataData_ES_6_8 implements IndexMetadata { - private ObjectNode root; + @Getter + private final ObjectNode rawJson; private ObjectNode mappings; private ObjectNode settings; - private String indexId; - private String indexName; + @Getter + private final String id; + @Getter + private final String name; public IndexMetadataData_ES_6_8(ObjectNode root, String indexId, String indexName) { - this.root = root; + this.rawJson = root; this.mappings = null; this.settings = null; - this.indexId = indexId; - this.indexName = indexName; + this.id = indexId; + this.name = 
indexName; } @Override public ObjectNode getAliases() { - return (ObjectNode) root.get("aliases"); - } - - @Override - public String getId() { - return indexId; + return (ObjectNode) rawJson.get("aliases"); } @Override @@ -37,17 +36,12 @@ public JsonNode getMappings() { return mappings; } - ObjectNode mappingsNode = (ObjectNode) root.get("mappings"); + ObjectNode mappingsNode = (ObjectNode) rawJson.get("mappings"); mappings = mappingsNode; return mappings; } - @Override - public String getName() { - return indexName; - } - @Override public int getNumberOfShards() { return this.getSettings().get("index").get("number_of_shards").asInt(); @@ -59,20 +53,15 @@ public ObjectNode getSettings() { return settings; } - ObjectNode treeSettings = TransformFunctions.convertFlatSettingsToTree((ObjectNode) root.get("settings")); + ObjectNode treeSettings = TransformFunctions.convertFlatSettingsToTree((ObjectNode) rawJson.get("settings")); settings = treeSettings; return settings; } - @Override - public ObjectNode rawJson() { - return root; - } - @Override public IndexMetadata deepCopy() { - return new IndexMetadataData_ES_6_8(root.deepCopy(), indexId, indexName); + return new IndexMetadataData_ES_6_8(rawJson.deepCopy(), id, name); } } diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/ShardMetadataData_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/ShardMetadataData_ES_6_8.java index caa97408a..0a5fbbd9a 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/ShardMetadataData_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/ShardMetadataData_ES_6_8.java @@ -1,13 +1,12 @@ package com.rfs.version_es_6_8; import java.io.IOException; -import java.util.ArrayList; +import java.util.Collections; import java.util.List; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -16,20 +15,22 @@ import com.rfs.models.ShardFileInfo; import com.rfs.models.ShardMetadata; +import lombok.Getter; +@Getter public class ShardMetadataData_ES_6_8 implements ShardMetadata { private static final ObjectMapper objectMapper = new ObjectMapper(); - private String snapshotName; - private String indexName; - private String indexId; - private int shardId; - private int indexVersion; - private long startTime; - private long time; - private int numberOfFiles; - private long totalSize; - private List files; + private final String snapshotName; + private final String indexName; + private final String indexId; + private final int shardId; + private final int indexVersion; + private final long startTime; + private final long time; + private final int numberOfFiles; + private final long totalSizeBytes; + private final List files; public ShardMetadataData_ES_6_8( String snapshotName, @@ -51,65 +52,14 @@ public ShardMetadataData_ES_6_8( this.startTime = startTime; this.time = time; this.numberOfFiles = numberOfFiles; - this.totalSize = totalSize; + this.totalSizeBytes = totalSize; // Convert the raw file metadata to the FileMetadata class List convertedFiles = new java.util.ArrayList<>(); for (FileInfoRaw fileMetadataRaw : files) { convertedFiles.add(FileInfo.fromFileMetadataRaw(fileMetadataRaw)); } - this.files = convertedFiles; - } - - @Override - public String getSnapshotName() { - return snapshotName; 
- } - - @Override - public String getIndexName() { - return indexName; - } - - @Override - public String getIndexId() { - return indexId; - } - - @Override - public int getShardId() { - return shardId; - } - - @Override - public int getIndexVersion() { - return indexVersion; - } - - @Override - public long getStartTime() { - return startTime; - } - - @Override - public long getTime() { - return time; - } - - @Override - public int getNumberOfFiles() { - return numberOfFiles; - } - - @Override - public long getTotalSizeBytes() { - return totalSize; - } - - @Override - public List getFiles() { - List convertedFiles = new ArrayList<>(files); - return convertedFiles; + this.files = Collections.unmodifiableList(convertedFiles); } @Override @@ -150,15 +100,16 @@ public DataRaw( } } + @Getter public static class FileInfo implements ShardFileInfo { - private String name; - private String physicalName; - private long length; - private String checksum; - private long partSize; - private long numberOfParts; - private String writtenBy; - private BytesRef metaHash; + private final String name; + private final String physicalName; + private final long length; + private final String checksum; + private final long partSize; + private final long numberOfParts; + private final String writtenBy; + private final BytesRef metaHash; public static FileInfo fromFileMetadataRaw(FileInfoRaw fileMetadataRaw) { return new FileInfo( @@ -189,7 +140,9 @@ public FileInfo( this.writtenBy = writtenBy; this.metaHash = metaHash; - // Calculate the number of parts the file is chopped into; taken from Elasticsearch code + // Calculate the number of parts the file is chopped into; taken from Elasticsearch code. When Elasticsearch makes + // a snapshot and finds Lucene files over a specified size, it will split those files into multiple parts based on the + // maximum part size. 
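+            // For example, with a 1 GiB maximum part size, a 2.5 GiB file is stored as three parts (two
+            // full 1 GiB parts plus the 0.5 GiB remainder), while a zero-length file still counts as one part.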
// See: // https://github.com/elastic/elasticsearch/blob/6.8/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java#L68 long partBytes = Long.MAX_VALUE; @@ -198,54 +151,14 @@ public FileInfo( } long totalLength = length; - long numberOfParts = totalLength / partBytes; + long numberOfPartsTemp = totalLength / partBytes; if (totalLength % partBytes > 0) { - numberOfParts++; + numberOfPartsTemp++; } - if (numberOfParts == 0) { - numberOfParts++; + if (numberOfPartsTemp == 0) { + numberOfPartsTemp++; } - this.numberOfParts = numberOfParts; - } - - @Override - public String getName() { - return name; - } - - @Override - public String getPhysicalName() { - return physicalName; - } - - @Override - public long getLength() { - return length; - } - - @Override - public String getChecksum() { - return checksum; - } - - @Override - public long getPartSize() { - return partSize; - } - - @Override - public String getWrittenBy() { - return writtenBy; - } - - @Override - public BytesRef getMetaHash() { - return metaHash; - } - - @Override - public long getNumberOfParts() { - return numberOfParts; + this.numberOfParts = numberOfPartsTemp; } // The Snapshot file may be split into multiple blobs; use this to find the correct file name @@ -298,8 +211,7 @@ public FileInfoRaw( public static class FileInfoRawDeserializer extends JsonDeserializer { @Override - public FileInfoRaw deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, - JsonProcessingException { + public FileInfoRaw deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { JsonNode rootNode = jp.getCodec().readTree(jp); diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataData_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataData_ES_6_8.java index ed78bb576..14f4b7b5c 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataData_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataData_ES_6_8.java @@ -5,9 +5,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.rfs.models.SnapshotMetadata; +import lombok.Getter; +@Getter public class SnapshotMetadataData_ES_6_8 implements SnapshotMetadata { - private String name; private String uuid; @JsonProperty("version_id") @@ -26,64 +27,4 @@ public class SnapshotMetadataData_ES_6_8 implements SnapshotMetadata { @JsonProperty("successful_shards") private int successfulShards; private List failures; // Haven't looked at this yet - - @Override - public String getName() { - return name; - } - - @Override - public String getUuid() { - return uuid; - } - - @Override - public int getVersionId() { - return versionId; - } - - @Override - public List getIndices() { - return indices; - } - - @Override - public String getState() { - return state; - } - - @Override - public String getReason() { - return reason; - } - - @Override - public boolean isIncludeGlobalState() { - return includeGlobalState; - } - - @Override - public long getStartTime() { - return startTime; - } - - @Override - public long getEndTime() { - return endTime; - } - - @Override - public int getTotalShards() { - return totalShards; - } - - @Override - public int getSuccessfulShards() { - return successfulShards; - } - - @Override - public List getFailures() { - return failures; - } } diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataFactory_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataFactory_ES_6_8.java index 34c8106d1..dc328faf7 100644 
--- a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataFactory_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotMetadataFactory_ES_6_8.java @@ -18,11 +18,10 @@ public class SnapshotMetadataFactory_ES_6_8 implements SnapshotMetadata.Factory public SnapshotMetadata fromJsonNode(JsonNode root) throws Exception { ObjectMapper mapper = new ObjectMapper(); ObjectNode objectNodeRoot = (ObjectNode) root; - SnapshotMetadataData_ES_6_8 snapshotMetadata = mapper.treeToValue( + return mapper.treeToValue( objectNodeRoot.get("snapshot"), SnapshotMetadataData_ES_6_8.class ); - return snapshotMetadata; } @Override diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoData_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoData_ES_6_8.java index 74c453c80..8a34e929b 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoData_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoData_ES_6_8.java @@ -11,6 +11,10 @@ import com.rfs.common.SnapshotRepo; import com.rfs.common.SnapshotRepo.CantParseRepoFile; import com.rfs.common.SourceRepo; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; public class SnapshotRepoData_ES_6_8 { @@ -36,19 +40,20 @@ public static SnapshotRepoData_ES_6_8 fromRepo(SourceRepo repo) { return fromRepoFile(file); } - public Path filePath; - public List snapshots; - public Map indices; + @Getter + private Path filePath; + @Getter + private List snapshots; + @Getter + private Map indices; + @Getter + @AllArgsConstructor + @NoArgsConstructor public static class Snapshot implements SnapshotRepo.Snapshot { - public String name; - public String uuid; - public int state; - - @Override - public String getName() { - return name; - } + private String name; + private String uuid; + private int state; @Override public String getId() { @@ -56,37 +61,23 @@ public String getId() { } } + @Getter + @AllArgsConstructor + @NoArgsConstructor public static class RawIndex { - public String id; - public List snapshots; + private String id; + private List snapshots; } + @Getter + @RequiredArgsConstructor public static class Index implements SnapshotRepo.Index { public static Index fromRawIndex(String name, RawIndex rawIndex) { - Index index = new Index(); - index.name = name; - index.id = rawIndex.id; - index.snapshots = rawIndex.snapshots; - return index; - } - - public String name; - public String id; - public List snapshots; - - @Override - public String getName() { - return name; + return new Index(name, rawIndex.id, rawIndex.snapshots); } - @Override - public String getId() { - return id; - } - - @Override - public List getSnapshots() { - return snapshots; - } + private final String name; + private final String id; + private final List snapshots; } } diff --git a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoProvider_ES_6_8.java b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoProvider_ES_6_8.java index 7576b9757..44cef6202 100644 --- a/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoProvider_ES_6_8.java +++ b/RFS/src/main/java/com/rfs/version_es_6_8/SnapshotRepoProvider_ES_6_8.java @@ -23,7 +23,7 @@ protected SnapshotRepoData_ES_6_8 getRepoData() { } public List getIndices() { - return getRepoData().indices.entrySet() + return getRepoData().getIndices().entrySet() .stream() .map(entry -> SnapshotRepoData_ES_6_8.Index.fromRawIndex(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -32,14 +32,14 @@ public List 
getIndices() { @Override public List getIndicesInSnapshot(String snapshotName) { List matchedIndices = new ArrayList<>(); - SnapshotRepoData_ES_6_8.Snapshot targetSnapshot = getRepoData().snapshots.stream() - .filter(snapshot -> snapshotName.equals(snapshot.name)) + SnapshotRepoData_ES_6_8.Snapshot targetSnapshot = getRepoData().getSnapshots().stream() + .filter(snapshot -> snapshotName.equals(snapshot.getName())) .findFirst() .orElse(null); if (targetSnapshot != null) { - getRepoData().indices.forEach((indexName, rawIndex) -> { - if (rawIndex.snapshots.contains(targetSnapshot.uuid)) { + getRepoData().getIndices().forEach((indexName, rawIndex) -> { + if (rawIndex.getSnapshots().contains(targetSnapshot.getId())) { matchedIndices.add(SnapshotRepoData_ES_6_8.Index.fromRawIndex(indexName, rawIndex)); } }); @@ -49,15 +49,14 @@ public List getIndicesInSnapshot(String snapshotName) { @Override public List getSnapshots() { - List convertedList = new ArrayList<>(getRepoData().snapshots); - return convertedList; + return new ArrayList<>(getRepoData().getSnapshots()); } @Override public String getSnapshotId(String snapshotName) { - for (SnapshotRepoData_ES_6_8.Snapshot snapshot : getRepoData().snapshots) { - if (snapshot.name.equals(snapshotName)) { - return snapshot.uuid; + for (SnapshotRepoData_ES_6_8.Snapshot snapshot : getRepoData().getSnapshots()) { + if (snapshot.getName().equals(snapshotName)) { + return snapshot.getId(); } } return null; @@ -65,7 +64,7 @@ public String getSnapshotId(String snapshotName) { @Override public String getIndexId(String indexName) { - return getRepoData().indices.get(indexName).id; + return getRepoData().getIndices().get(indexName).getId(); } @Override diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/ElasticsearchConstants_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/ElasticsearchConstants_ES_7_10.java index e26b90244..3d95fb245 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/ElasticsearchConstants_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/ElasticsearchConstants_ES_7_10.java @@ -1,11 +1,15 @@ package com.rfs.version_es_7_10; +import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; + public class ElasticsearchConstants_ES_7_10 { + private ElasticsearchConstants_ES_7_10() {} + public static final int BUFFER_SIZE_IN_BYTES; public static final SmileFactory SMILE_FACTORY; public static final String SOFT_DELETES_FIELD; @@ -17,12 +21,13 @@ public class ElasticsearchConstants_ES_7_10 { // Taken from: // https://github.com/elastic/elasticsearch/blob/7.10/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java#L54 - SmileFactory smileFactory = new SmileFactory(); - smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); - smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); - smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); - smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, false); - SMILE_FACTORY = smileFactory; + SMILE_FACTORY = SmileFactory.builder() + .configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false) + .configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false) + .build(); + + SMILE_FACTORY.disable(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT); + 
SMILE_FACTORY.disable(JsonParser.Feature.STRICT_DUPLICATE_DETECTION); // Taken from: // https://github.com/elastic/elasticsearch/blob/v7.10.2/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java#L110 diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/IndexMetadataData_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/IndexMetadataData_ES_7_10.java index 54ed58ffb..64d712628 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/IndexMetadataData_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/IndexMetadataData_ES_7_10.java @@ -4,30 +4,29 @@ import com.rfs.models.IndexMetadata; import com.rfs.transformers.TransformFunctions; +import lombok.Getter; public class IndexMetadataData_ES_7_10 implements IndexMetadata { - private ObjectNode root; + @Getter + private final ObjectNode rawJson; private ObjectNode mappings; private ObjectNode settings; - private String indexId; - private String indexName; + @Getter + private final String id; + @Getter + private final String name; public IndexMetadataData_ES_7_10(ObjectNode root, String indexId, String indexName) { - this.root = root; + this.rawJson = root; this.mappings = null; this.settings = null; - this.indexId = indexId; - this.indexName = indexName; + this.id = indexId; + this.name = indexName; } @Override public ObjectNode getAliases() { - return (ObjectNode) root.get("aliases"); - } - - @Override - public String getId() { - return indexId; + return (ObjectNode) rawJson.get("aliases"); } @Override @@ -36,17 +35,12 @@ public ObjectNode getMappings() { return mappings; } - ObjectNode mappingsNode = (ObjectNode) root.get("mappings"); + ObjectNode mappingsNode = (ObjectNode) rawJson.get("mappings"); mappings = mappingsNode; return mappings; } - @Override - public String getName() { - return indexName; - } - @Override public int getNumberOfShards() { return this.getSettings().get("index").get("number_of_shards").asInt(); @@ -58,20 +52,15 @@ public ObjectNode getSettings() { return settings; } - ObjectNode treeSettings = TransformFunctions.convertFlatSettingsToTree((ObjectNode) root.get("settings")); + ObjectNode treeSettings = TransformFunctions.convertFlatSettingsToTree((ObjectNode) rawJson.get("settings")); settings = treeSettings; return settings; } - @Override - public ObjectNode rawJson() { - return root; - } - @Override public IndexMetadata deepCopy() { - return new IndexMetadataData_ES_7_10(root.deepCopy(), indexId, indexName); + return new IndexMetadataData_ES_7_10(rawJson.deepCopy(), id, name); } } diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/ShardMetadataData_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/ShardMetadataData_ES_7_10.java index 102a91c4c..31a238b28 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/ShardMetadataData_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/ShardMetadataData_ES_7_10.java @@ -1,13 +1,12 @@ package com.rfs.version_es_7_10; import java.io.IOException; -import java.util.ArrayList; +import java.util.Collections; import java.util.List; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonDeserializer; @@ -17,7 +16,9 @@ import com.rfs.models.ShardFileInfo; import com.rfs.models.ShardMetadata; +import lombok.Getter; 
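+// Lombok's @Getter supplies the accessors that the hand-written getters removed below used to provide;
+// the totalSize field becomes totalSizeBytes so the generated getter matches ShardMetadata.getTotalSizeBytes().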
+@Getter public class ShardMetadataData_ES_7_10 implements ShardMetadata { private static final ObjectMapper objectMapper = new ObjectMapper(); @@ -25,16 +26,16 @@ public class ShardMetadataData_ES_7_10 implements ShardMetadata { objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); } - private String snapshotName; - private String indexName; - private String indexId; - private int shardId; - private int indexVersion; - private long startTime; - private long time; - private int numberOfFiles; - private long totalSize; - private List files; + private final String snapshotName; + private final String indexName; + private final String indexId; + private final int shardId; + private final int indexVersion; + private final long startTime; + private final long time; + private final int numberOfFiles; + private final long totalSizeBytes; + private final List files; public ShardMetadataData_ES_7_10( String snapshotName, @@ -56,65 +57,14 @@ public ShardMetadataData_ES_7_10( this.startTime = startTime; this.time = time; this.numberOfFiles = numberOfFiles; - this.totalSize = totalSize; + this.totalSizeBytes = totalSize; // Convert the raw file metadata to the FileMetadata class List convertedFiles = new java.util.ArrayList<>(); for (FileInfoRaw fileMetadataRaw : files) { convertedFiles.add(FileInfo.fromFileMetadataRaw(fileMetadataRaw)); } - this.files = convertedFiles; - } - - @Override - public String getSnapshotName() { - return snapshotName; - } - - @Override - public String getIndexName() { - return indexName; - } - - @Override - public String getIndexId() { - return indexId; - } - - @Override - public int getShardId() { - return shardId; - } - - @Override - public int getIndexVersion() { - return indexVersion; - } - - @Override - public long getStartTime() { - return startTime; - } - - @Override - public long getTime() { - return time; - } - - @Override - public int getNumberOfFiles() { - return numberOfFiles; - } - - @Override - public long getTotalSizeBytes() { - return totalSize; - } - - @Override - public List getFiles() { - List convertedFiles = new ArrayList<>(files); - return convertedFiles; + this.files = Collections.unmodifiableList(convertedFiles); } @Override @@ -155,15 +105,16 @@ public DataRaw( } } + @Getter public static class FileInfo implements ShardFileInfo { - private String name; - private String physicalName; - private long length; - private String checksum; - private long partSize; - private long numberOfParts; - private String writtenBy; - private BytesRef metaHash; + private final String name; + private final String physicalName; + private final long length; + private final String checksum; + private final long partSize; + private final long numberOfParts; + private final String writtenBy; + private final BytesRef metaHash; public static FileInfo fromFileMetadataRaw(FileInfoRaw fileMetadataRaw) { return new FileInfo( @@ -194,7 +145,9 @@ public FileInfo( this.writtenBy = writtenBy; this.metaHash = metaHash; - // Calculate the number of parts the file is chopped into; taken from Elasticsearch code + // Calculate the number of parts the file is chopped into; taken from Elasticsearch code. When Elasticsearch makes + // a snapshot and finds Lucene files over a specified size, it will split those files into multiple parts based on the + // maximum part size. 
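+            // For example, with a 1 GiB maximum part size, a 2.5 GiB file is stored as three parts (two
+            // full 1 GiB parts plus the 0.5 GiB remainder), while a zero-length file still counts as one part.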
// See: // https://github.com/elastic/elasticsearch/blob/6.8/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardSnapshot.java#L68 long partBytes = Long.MAX_VALUE; @@ -203,54 +156,14 @@ public FileInfo( } long totalLength = length; - long numberOfParts = totalLength / partBytes; + long numberOfPartsTemp = totalLength / partBytes; if (totalLength % partBytes > 0) { - numberOfParts++; + numberOfPartsTemp++; } - if (numberOfParts == 0) { - numberOfParts++; + if (numberOfPartsTemp == 0) { + numberOfPartsTemp++; } - this.numberOfParts = numberOfParts; - } - - @Override - public String getName() { - return name; - } - - @Override - public String getPhysicalName() { - return physicalName; - } - - @Override - public long getLength() { - return length; - } - - @Override - public String getChecksum() { - return checksum; - } - - @Override - public long getPartSize() { - return partSize; - } - - @Override - public String getWrittenBy() { - return writtenBy; - } - - @Override - public BytesRef getMetaHash() { - return metaHash; - } - - @Override - public long getNumberOfParts() { - return numberOfParts; + this.numberOfParts = numberOfPartsTemp; } // The Snapshot file may be split into multiple blobs; use this to find the correct file name @@ -303,8 +216,7 @@ public FileInfoRaw( public static class FileInfoRawDeserializer extends JsonDeserializer { @Override - public FileInfoRaw deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, - JsonProcessingException { + public FileInfoRaw deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { JsonNode rootNode = jp.getCodec().readTree(jp); diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataData_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataData_ES_7_10.java index 9a609ef9d..09912837d 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataData_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataData_ES_7_10.java @@ -5,9 +5,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.rfs.models.SnapshotMetadata; +import lombok.Getter; +@Getter public class SnapshotMetadataData_ES_7_10 implements SnapshotMetadata { - private String name; private String uuid; @JsonProperty("version_id") @@ -30,64 +31,4 @@ public class SnapshotMetadataData_ES_7_10 implements SnapshotMetadata { private List dataStreams; // Haven't looked into this yet @JsonProperty("metadata") private Object metaData; // Haven't looked into this yet - - @Override - public String getName() { - return name; - } - - @Override - public String getUuid() { - return uuid; - } - - @Override - public int getVersionId() { - return versionId; - } - - @Override - public List getIndices() { - return indices; - } - - @Override - public String getState() { - return state; - } - - @Override - public String getReason() { - return reason; - } - - @Override - public boolean isIncludeGlobalState() { - return includeGlobalState; - } - - @Override - public long getStartTime() { - return startTime; - } - - @Override - public long getEndTime() { - return endTime; - } - - @Override - public int getTotalShards() { - return totalShards; - } - - @Override - public int getSuccessfulShards() { - return successfulShards; - } - - @Override - public List getFailures() { - return failures; - } } diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataFactory_ES_7_10.java 
b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataFactory_ES_7_10.java index e79b774e8..565803e0f 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataFactory_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotMetadataFactory_ES_7_10.java @@ -20,11 +20,10 @@ public SnapshotMetadata fromJsonNode(JsonNode root) throws Exception { ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); ObjectNode objectNodeRoot = (ObjectNode) root; - SnapshotMetadataData_ES_7_10 snapshotMetadata = mapper.treeToValue( + return mapper.treeToValue( objectNodeRoot.get("snapshot"), SnapshotMetadataData_ES_7_10.class ); - return snapshotMetadata; } @Override diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10.java index b7b95f77d..d98a62f1e 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10.java @@ -13,7 +13,12 @@ import com.rfs.common.SnapshotRepo; import com.rfs.common.SnapshotRepo.CantParseRepoFile; import com.rfs.common.SourceRepo; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; +@Getter public class SnapshotRepoData_ES_7_10 { public static SnapshotRepoData_ES_7_10 fromRepoFile(Path filePath) { ObjectMapper mapper = new ObjectMapper(); @@ -38,26 +43,24 @@ public static SnapshotRepoData_ES_7_10 fromRepo(SourceRepo repo) { return fromRepoFile(file); } - public Path filePath; - public List snapshots; - public Map indices; + private Path filePath; + private List snapshots; + private Map indices; @JsonProperty("min_version") - public String minVersion; + private String minVersion; @JsonProperty("index_metadata_identifiers") - public Map indexMetadataIdentifiers; + private Map indexMetadataIdentifiers; + @Getter + @AllArgsConstructor + @NoArgsConstructor public static class Snapshot implements SnapshotRepo.Snapshot { - public String name; - public String uuid; - public int state; + private String name; + private String uuid; + private int state; @JsonProperty("index_metadata_lookup") - public Map indexMetadataLookup; - public String version; - - @Override - public String getName() { - return name; - } + private Map indexMetadataLookup; + private String version; @Override public String getId() { @@ -65,41 +68,26 @@ public String getId() { } } + @Getter + @AllArgsConstructor + @NoArgsConstructor public static class RawIndex { - public String id; - public List snapshots; + private String id; + private List snapshots; @JsonProperty("shard_generations") - public List shardGenerations; + private List shardGenerations; } + @Getter + @RequiredArgsConstructor public static class Index implements SnapshotRepo.Index { public static Index fromRawIndex(String name, RawIndex rawIndex) { - Index index = new Index(); - index.name = name; - index.id = rawIndex.id; - index.snapshots = rawIndex.snapshots; - index.shardGenerations = rawIndex.shardGenerations; - return index; - } - - public String name; - public String id; - public List snapshots; - public List shardGenerations; - - @Override - public String getName() { - return name; + return new Index(name, rawIndex.id, rawIndex.snapshots, rawIndex.shardGenerations); } - @Override - public String getId() { - return id; - } - - @Override - public List getSnapshots() { - return snapshots; - } + 
private final String name; + private final String id; + private final List snapshots; + private final List shardGenerations; } } diff --git a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoProvider_ES_7_10.java b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoProvider_ES_7_10.java index 046c1a916..ab154f85e 100644 --- a/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoProvider_ES_7_10.java +++ b/RFS/src/main/java/com/rfs/version_es_7_10/SnapshotRepoProvider_ES_7_10.java @@ -23,7 +23,7 @@ protected SnapshotRepoData_ES_7_10 getRepoData() { } public List getIndices() { - return getRepoData().indices.entrySet() + return getRepoData().getIndices().entrySet() .stream() .map(entry -> SnapshotRepoData_ES_7_10.Index.fromRawIndex(entry.getKey(), entry.getValue())) .collect(Collectors.toList()); @@ -32,15 +32,15 @@ public List getIndices() { @Override public List getIndicesInSnapshot(String snapshotName) { List matchedIndices = new ArrayList<>(); - SnapshotRepoData_ES_7_10.Snapshot targetSnapshot = getRepoData().snapshots.stream() - .filter(snapshot -> snapshotName.equals(snapshot.name)) + SnapshotRepoData_ES_7_10.Snapshot targetSnapshot = getRepoData().getSnapshots().stream() + .filter(snapshot -> snapshotName.equals(snapshot.getName())) .findFirst() .orElse(null); if (targetSnapshot != null) { - targetSnapshot.indexMetadataLookup.keySet().forEach(indexId -> { - getRepoData().indices.forEach((indexName, rawIndex) -> { - if (indexId.equals(rawIndex.id)) { + targetSnapshot.getIndexMetadataLookup().keySet().forEach(indexId -> { + getRepoData().getIndices().forEach((indexName, rawIndex) -> { + if (indexId.equals(rawIndex.getId())) { matchedIndices.add(SnapshotRepoData_ES_7_10.Index.fromRawIndex(indexName, rawIndex)); } }); @@ -51,14 +51,13 @@ public List getIndicesInSnapshot(String snapshotName) { @Override public List getSnapshots() { - List convertedList = new ArrayList<>(getRepoData().snapshots); - return convertedList; + return new ArrayList<>(getRepoData().getSnapshots()); } public String getSnapshotId(String snapshotName) { - for (SnapshotRepoData_ES_7_10.Snapshot snapshot : getRepoData().snapshots) { - if (snapshot.name.equals(snapshotName)) { - return snapshot.uuid; + for (SnapshotRepoData_ES_7_10.Snapshot snapshot : getRepoData().getSnapshots()) { + if (snapshot.getName().equals(snapshotName)) { + return snapshot.getId(); } } return null; @@ -66,7 +65,7 @@ public String getSnapshotId(String snapshotName) { @Override public String getIndexId(String indexName) { - return getRepoData().indices.get(indexName).id; + return getRepoData().getIndices().get(indexName).getId(); } @Override @@ -80,15 +79,15 @@ public String getIndexMetadataId(String snapshotName, String indexName) { return null; } - String metadataLookupKey = getRepoData().snapshots.stream() - .filter(snapshot -> snapshot.name.equals(snapshotName)) - .map(snapshot -> snapshot.indexMetadataLookup.get(indexId)) + String metadataLookupKey = getRepoData().getSnapshots().stream() + .filter(snapshot -> snapshot.getName().equals(snapshotName)) + .map(snapshot -> snapshot.getIndexMetadataLookup().get(indexId)) .findFirst() .orElse(null); if (metadataLookupKey == null) { return null; } - return getRepoData().indexMetadataIdentifiers.get(metadataLookupKey); + return getRepoData().getIndexMetadataIdentifiers().get(metadataLookupKey); } } diff --git a/RFS/src/main/java/com/rfs/version_os_2_11/IndexCreator_OS_2_11.java b/RFS/src/main/java/com/rfs/version_os_2_11/IndexCreator_OS_2_11.java index c163eadd8..97ad3613b 100644 --- 
a/RFS/src/main/java/com/rfs/version_os_2_11/IndexCreator_OS_2_11.java +++ b/RFS/src/main/java/com/rfs/version_os_2_11/IndexCreator_OS_2_11.java @@ -24,7 +24,7 @@ public boolean create( MigrationMode mode, ICreateIndexContext context ) { - IndexMetadataData_OS_2_11 indexMetadata = new IndexMetadataData_OS_2_11(index.rawJson(), index.getId(), index.getName()); + IndexMetadataData_OS_2_11 indexMetadata = new IndexMetadataData_OS_2_11(index.getRawJson(), index.getId(), index.getName()); // Remove some settings which will cause errors if you try to pass them to the API ObjectNode settings = indexMetadata.getSettings(); diff --git a/RFS/src/main/java/com/rfs/version_os_2_11/IndexMetadataData_OS_2_11.java b/RFS/src/main/java/com/rfs/version_os_2_11/IndexMetadataData_OS_2_11.java index 660bf0afa..db023e619 100644 --- a/RFS/src/main/java/com/rfs/version_os_2_11/IndexMetadataData_OS_2_11.java +++ b/RFS/src/main/java/com/rfs/version_os_2_11/IndexMetadataData_OS_2_11.java @@ -46,7 +46,7 @@ public ObjectNode getSettings() { } @Override - public ObjectNode rawJson() { + public ObjectNode getRawJson() { return root; } diff --git a/RFS/src/main/java/com/rfs/version_universal/RemoteIndexMetadata.java b/RFS/src/main/java/com/rfs/version_universal/RemoteIndexMetadata.java index e9ee759e7..2d818edeb 100644 --- a/RFS/src/main/java/com/rfs/version_universal/RemoteIndexMetadata.java +++ b/RFS/src/main/java/com/rfs/version_universal/RemoteIndexMetadata.java @@ -13,7 +13,7 @@ public class RemoteIndexMetadata implements IndexMetadata { private ObjectNode sourceData; @Override - public ObjectNode rawJson() { + public ObjectNode getRawJson() { return sourceData; } diff --git a/RFS/src/test/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10Test.java b/RFS/src/test/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10Test.java index 4a527d510..6d3aacbbb 100644 --- a/RFS/src/test/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10Test.java +++ b/RFS/src/test/java/com/rfs/version_es_7_10/SnapshotRepoData_ES_7_10Test.java @@ -44,8 +44,8 @@ void testFromRepoFile_default() { final var result = SnapshotRepoData_ES_7_10.fromRepoFile(jsonInFile); // Verify - assertThat(result.minVersion, equalTo("7.9.0")); - assertThat(result.indices.size(), equalTo(1)); + assertThat(result.getMinVersion(), equalTo("7.9.0")); + assertThat(result.getIndices().size(), equalTo(1)); } @Test @@ -58,8 +58,8 @@ void testFromRepoFile_extraFields() { final var result = SnapshotRepoData_ES_7_10.fromRepoFile(jsonInFile); // Verify - assertThat(result.minVersion, equalTo("7.9.0")); - assertThat(result.indices.size(), equalTo(1)); + assertThat(result.getMinVersion(), equalTo("7.9.0")); + assertThat(result.getIndices().size(), equalTo(1)); } private String insertAtLine(final String source, final String toAdd, final int lineNumber) { diff --git a/transformation/src/main/java/org/opensearch/migrations/transformation/entity/Entity.java b/transformation/src/main/java/org/opensearch/migrations/transformation/entity/Entity.java index 542d3e965..47161001d 100644 --- a/transformation/src/main/java/org/opensearch/migrations/transformation/entity/Entity.java +++ b/transformation/src/main/java/org/opensearch/migrations/transformation/entity/Entity.java @@ -15,5 +15,5 @@ public interface Entity { /** * Gets the underlying entity as an ObjectNode, supports read and write operations */ - ObjectNode rawJson(); + ObjectNode getRawJson(); } diff --git a/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java 
b/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java index 336feec3c..5ada91268 100644 --- a/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java +++ b/transformation/src/main/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemoval.java @@ -43,7 +43,7 @@ public class IndexMappingTypeRemoval implements TransformationRule { @Override public CanApplyResult canApply(final Index index) { - final var mappingNode = index.rawJson().get(MAPPINGS_KEY); + final var mappingNode = index.getRawJson().get(MAPPINGS_KEY); if (mappingNode == null) { return CanApplyResult.NO; @@ -74,7 +74,7 @@ public boolean applyTransformation(final Index index) { return false; } - final var mappingsNode = index.rawJson().get(MAPPINGS_KEY); + final var mappingsNode = index.getRawJson().get(MAPPINGS_KEY); // Handle array case if (mappingsNode.isArray()) { final var mappingsInnerNode = (ObjectNode) mappingsNode.get(0); @@ -84,7 +84,7 @@ public boolean applyTransformation(final Index index) { mappingsInnerNode.remove(typeName); typeNode.fields().forEachRemaining(node -> mappingsInnerNode.set(node.getKey(), node.getValue())); - index.rawJson().set(MAPPINGS_KEY, mappingsInnerNode); + index.getRawJson().set(MAPPINGS_KEY, mappingsInnerNode); } if (mappingsNode.isObject()) { diff --git a/transformation/src/test/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemovalTest.java b/transformation/src/test/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemovalTest.java index 1cda71c03..3d9e77068 100644 --- a/transformation/src/test/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemovalTest.java +++ b/transformation/src/test/java/org/opensearch/migrations/transformation/rules/IndexMappingTypeRemovalTest.java @@ -102,14 +102,14 @@ public ObjectNode indexSettingJson(final String mappingSection) { private CanApplyResult canApply(final ObjectNode indexJson) { var transformer = new IndexMappingTypeRemoval(); var index = mock(Index.class); - Mockito.when(index.rawJson()).thenReturn(indexJson); + Mockito.when(index.getRawJson()).thenReturn(indexJson); return transformer.canApply(index); } private boolean applyTransformation(final ObjectNode indexJson) { var transformer = new IndexMappingTypeRemoval(); var index = mock(Index.class); - Mockito.when(index.rawJson()).thenReturn(indexJson); + Mockito.when(index.getRawJson()).thenReturn(indexJson); log.atInfo().setMessage("Original\n{}").addArgument(indexJson.toPrettyString()).log(); var wasChanged = transformer.applyTransformation(index);