From 7ceb13270e86ded749d4e2a5a2290c5f3486fc30 Mon Sep 17 00:00:00 2001 From: Jakub Scholz Date: Sat, 7 Sep 2024 16:58:14 +0200 Subject: [PATCH 1/3] Continue refactoring of the test module Signed-off-by: Jakub Scholz --- .checkstyle/suppressions.xml | 4 +- api/pom.xml | 5 - .../api/kafka/model/AbstractCrdIT.java | 6 +- .../api/kafka/model/AbstractCrdTest.java | 10 +- .../strimzi/api/kafka/model/ExamplesTest.java | 7 +- .../api/kafka/model/JvmOptionsTest.java | 15 +- .../api/kafka/model/KafkaJmxOptionsTest.java | 6 +- .../api/kafka/model/StructuralCrdIT.java | 5 +- .../kafka/model/bridge/KafkaBridgeCrdIT.java | 8 +- .../model/connect/KafkaConnectCrdIT.java | 5 +- .../model/connector/KafkaConnectorCrdIT.java | 5 +- .../api/kafka/model/kafka/KafkaCrdIT.java | 5 +- .../api/kafka/model/kafka/KafkaTest.java | 12 +- .../mirrormaker/KafkaMirrorMakerCrdIT.java | 5 +- .../mirrormaker2/KafkaMirrorMaker2CrdIT.java | 5 +- .../model/nodepool/KafkaNodePoolCrdIT.java | 5 +- .../model/podset/StrimziPodSetCrdIT.java | 5 +- .../model/rebalance/KafkaRebalanceCrdIT.java | 5 +- .../kafka/model/topic/KafkaTopicCrdIT.java | 5 +- .../api/kafka/model/user/KafkaUserCrdIT.java | 5 +- .../operator/cluster/ResourceUtils.java | 15 +- .../cluster/model/AbstractModelTest.java | 6 +- .../cluster/model/CruiseControlTest.java | 18 +- .../cluster/model/EntityOperatorTest.java | 16 +- .../model/EntityTopicOperatorTest.java | 5 +- .../cluster/model/EntityUserOperatorTest.java | 3 +- .../cluster/model/KafkaBridgeClusterTest.java | 22 +- .../model/KafkaClusterListenersTest.java | 88 ++-- .../cluster/model/KafkaClusterTest.java | 52 +- .../model/KafkaClusterZooBasedTest.java | 71 ++- .../cluster/model/KafkaConnectBuildTest.java | 18 +- .../model/KafkaConnectClusterTest.java | 29 +- .../cluster/model/KafkaExporterTest.java | 12 +- .../model/KafkaMirrorMaker2ClusterTest.java | 29 +- .../model/KafkaMirrorMakerClusterTest.java | 23 +- .../model/ZookeeperClusterPodSetTest.java | 8 +- 
.../cluster/model/ZookeeperClusterTest.java | 26 +- .../operator/assembly/CaReconcilerTest.java | 78 +-- .../operator/assembly/ConnectorMockTest.java | 14 +- .../assembly/KafkaAssemblyOperatorTest.java | 10 +- .../KafkaAssemblyOperatorZooBasedTest.java | 20 +- .../KafkaBridgeAssemblyOperatorTest.java | 2 +- .../operator/assembly/KafkaConnectApiIT.java | 3 +- .../KafkaConnectAssemblyOperatorMockTest.java | 8 +- ...emblyOperatorConnectorAutoRestartTest.java | 3 +- ...aMirrorMaker2AssemblyOperatorMockTest.java | 6 +- .../KafkaMirrorMakerAssemblyOperatorTest.java | 2 +- .../KafkaRebalanceAssemblyOperatorTest.java | 7 +- .../KafkaRebalanceStateMachineTest.java | 5 +- .../assembly/StrimziPodSetControllerIT.java | 17 +- .../KafkaBrokerConfigurationDiffTest.java | 6 +- .../CruiseControlClientTest.java | 5 +- .../cruisecontrol/MockCruiseControl.java | 40 +- .../AbstractCustomResourceOperatorIT.java | 5 +- .../kubernetes/KafkaBridgeCrdOperatorIT.java | 4 +- .../kubernetes/KafkaConnectCrdOperatorIT.java | 4 +- .../KafkaConnectorCrdOperatorIT.java | 4 +- .../kubernetes/KafkaCrdOperatorIT.java | 4 +- .../KafkaMirrorMaker2CrdOperatorIT.java | 4 +- .../KafkaMirrorMakerCrdOperatorIT.java | 4 +- .../StrimziPodSetCrdOperatorIT.java | 4 +- .../io/strimzi/test/mockkube3/MockKube3.java | 20 +- .../common/auth/PemAuthIdentityTest.java | 7 +- .../operator/common/auth/PemTrustSetTest.java | 3 +- .../common/model/ResourceVisitorTest.java | 6 +- .../common/model/ValidationVisitorTest.java | 4 +- .../AbstractCustomResourceOperatorIT.java | 5 +- .../concurrent/KafkaUserCrdOperatorIT.java | 4 +- .../strimzi/systemtest/logs/LogCollector.java | 2 +- .../draincleaner/SetupDrainCleaner.java | 15 +- .../resources/jaeger/SetupJaeger.java | 4 +- .../ClusterRoleBindingResource.java | 4 +- .../kubernetes/DeploymentResource.java | 4 +- .../kubernetes/RoleBindingResource.java | 4 +- .../resources/kubernetes/RoleResource.java | 4 +- .../operator/SetupClusterOperator.java | 27 +- 
.../templates/crd/KafkaConnectTemplates.java | 4 +- .../crd/KafkaMirrorMaker2Templates.java | 4 +- .../templates/crd/KafkaTemplates.java | 6 +- .../utils/specific/BridgeUtils.java | 4 +- .../java/io/strimzi/test/executor/Exec.java | 115 +++-- .../io/strimzi/test/executor/ExecResult.java | 15 + .../strimzi/systemtest/connect/ConnectST.java | 3 +- .../systemtest/log/LoggingChangeST.java | 3 +- .../mirrormaker/MirrorMaker2ST.java | 3 +- .../systemtest/operators/user/UserST.java | 9 +- .../systemtest/security/SecurityST.java | 3 +- .../systemtest/specific/SpecificST.java | 20 +- .../systemtest/upgrade/AbstractUpgradeST.java | 13 +- .../upgrade/kraft/AbstractKRaftUpgradeST.java | 4 +- .../main/java/io/strimzi/test/CrdUtils.java | 122 +++++ .../java/io/strimzi/test/ReadWriteUtils.java | 318 ++++++++++++ .../main/java/io/strimzi/test/TestUtils.java | 465 ++++-------------- .../java/io/strimzi/test/WaitException.java | 13 + .../test/annotations/IsolatedTest.java | 3 + .../ExtensionContextParameterResolver.java | 3 + .../test/interfaces/TestSeparator.java | 19 + .../operator/topic/TopicOperatorTestUtil.java | 3 +- .../CruiseControlHandlerTest.java | 9 +- .../cruisecontrol/MockCruiseControl.java | 22 +- .../user/model/KafkaUserModelTest.java | 10 +- 101 files changed, 1211 insertions(+), 923 deletions(-) create mode 100644 test/src/main/java/io/strimzi/test/CrdUtils.java create mode 100644 test/src/main/java/io/strimzi/test/ReadWriteUtils.java diff --git a/.checkstyle/suppressions.xml b/.checkstyle/suppressions.xml index 31604c11311..bf2c33e5cac 100644 --- a/.checkstyle/suppressions.xml +++ b/.checkstyle/suppressions.xml @@ -39,7 +39,7 @@ - - + + \ No newline at end of file diff --git a/api/pom.xml b/api/pom.xml index c7d1109f02e..40971a51732 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -62,11 +62,6 @@ com.fasterxml.jackson.core jackson-databind - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - test - io.sundr builder-annotations diff --git 
a/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdIT.java index 22ed45870d0..796ddbe5f04 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdIT.java @@ -6,7 +6,7 @@ import io.fabric8.kubernetes.client.CustomResource; import io.fabric8.kubernetes.client.KubernetesClient; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.interfaces.TestSeparator; import org.junit.jupiter.api.TestInstance; @@ -62,11 +62,11 @@ private void createDelete(File resourceFile) { } protected void createScaleDelete(Class resourceClass, String resource) { - T model = TestUtils.fromYaml(resource, resourceClass); + T model = ReadWriteUtils.readObjectFromYamlFileInResources(resource, resourceClass); String apiVersion = model.getApiVersion(); String kind = model.getKind(); String resourceName = model.getMetadata().getName(); - String resourceYamlAsString = TestUtils.toYamlString(model); + String resourceYamlAsString = ReadWriteUtils.writeObjectToYamlString(model); createScaleDelete(apiVersion, kind, resourceName, resourceYamlAsString); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdTest.java b/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdTest.java index bb45ba8fc6f..af317ac3822 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdTest.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/AbstractCrdTest.java @@ -6,7 +6,7 @@ import io.fabric8.kubernetes.api.model.ObjectMeta; import io.fabric8.kubernetes.client.CustomResource; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.junit.jupiter.api.Test; import static org.hamcrest.CoreMatchers.is; @@ -24,23 +24,23 @@ protected AbstractCrdTest(Class crdClass) { } protected void assertDesiredResource(R actual, String expectedResource) { - String content = 
TestUtils.readResource(getClass(), expectedResource); + String content = ReadWriteUtils.readFileFromResources(getClass(), expectedResource); assertThat("The resource " + expectedResource + " does not exist", content, is(notNullValue())); - String ssStr = TestUtils.toYamlString(actual); + String ssStr = ReadWriteUtils.writeObjectToYamlString(actual); assertThat(ssStr.trim(), is(content.trim())); } @Test public void roundTrip() { String resourceName = crdClass.getSimpleName() + ".yaml"; - R model = TestUtils.fromYaml(resourceName, crdClass); + R model = ReadWriteUtils.readObjectFromYamlFileInResources(resourceName, crdClass); assertThat("The classpath resource " + resourceName + " does not exist", model, is(notNullValue())); ObjectMeta metadata = model.getMetadata(); assertThat(metadata, is(notNullValue())); assertDesiredResource(model, crdClass.getSimpleName() + ".out.yaml"); - assertDesiredResource(TestUtils.fromYamlString(TestUtils.toYamlString(model), crdClass), crdClass.getSimpleName() + ".out.yaml"); + assertDesiredResource(ReadWriteUtils.readObjectFromYamlString(ReadWriteUtils.writeObjectToYamlString(model), crdClass), crdClass.getSimpleName() + ".out.yaml"); } } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/ExamplesTest.java b/api/src/test/java/io/strimzi/api/kafka/model/ExamplesTest.java index 59f52a2af7a..d2588846beb 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/ExamplesTest.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/ExamplesTest.java @@ -5,11 +5,10 @@ package io.strimzi.api.kafka.model; import com.fasterxml.jackson.annotation.JsonAnyGetter; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import io.fabric8.kubernetes.api.model.KubernetesResource; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinitionSpec; import io.fabric8.kubernetes.client.KubernetesClientBuilder; +import io.strimzi.test.ReadWriteUtils; import 
io.strimzi.test.TestUtils; import org.junit.jupiter.api.Test; @@ -25,7 +24,6 @@ import static org.junit.jupiter.api.Assertions.fail; - /** * The purpose of this test is to check that all the resources in the * {@code ../packaging/examples} directory are valid. @@ -57,8 +55,7 @@ private void validateRecursively(File directory) { private void validate(File f) { try { - ObjectMapper mapper = new YAMLMapper(); - final String content = TestUtils.readFile(f); + final String content = ReadWriteUtils.readFile(f); validate(content); } catch (Exception | AssertionError e) { throw new AssertionError("Invalid example yaml in " + f.getPath() + ": " + e.getMessage(), e); diff --git a/api/src/test/java/io/strimzi/api/kafka/model/JvmOptionsTest.java b/api/src/test/java/io/strimzi/api/kafka/model/JvmOptionsTest.java index 747984f700c..1d0db01c664 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/JvmOptionsTest.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/JvmOptionsTest.java @@ -5,16 +5,19 @@ package io.strimzi.api.kafka.model; import io.strimzi.api.kafka.model.common.JvmOptions; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.junit.jupiter.api.Test; +import java.util.Map; + import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; + public class JvmOptionsTest { @Test public void testSetXmxXms() { - JvmOptions opts = TestUtils.fromYamlString("-Xmx: 2g\n" + + JvmOptions opts = ReadWriteUtils.readObjectFromYamlString("-Xmx: 2g\n" + "-Xms: 1g", JvmOptions.class); @@ -24,7 +27,7 @@ public void testSetXmxXms() { @Test public void testEmptyXmxXms() { - JvmOptions opts = TestUtils.fromYamlString("{}", JvmOptions.class); + JvmOptions opts = ReadWriteUtils.readObjectFromYamlString("{}", JvmOptions.class); assertThat(opts.getXms(), is(nullValue())); assertThat(opts.getXmx(), is(nullValue())); @@ -32,7 +35,7 @@ public void testEmptyXmxXms() { @Test 
public void testXx() { - JvmOptions opts = TestUtils.fromYamlString("-XX:\n" + + JvmOptions opts = ReadWriteUtils.readObjectFromYamlString("-XX:\n" + " key1: value1\n" + " key2: value2\n" + " key3: true\n" + @@ -40,9 +43,9 @@ public void testXx() { " key5: 10\n", JvmOptions.class); - assertThat(opts.getXx(), is(TestUtils.map("key1", "value1", "key2", "value2", "key3", "true", "key4", "true", "key5", "10"))); + assertThat(opts.getXx(), is(Map.of("key1", "value1", "key2", "value2", "key3", "true", "key4", "true", "key5", "10"))); - opts = TestUtils.fromYamlString("{}", JvmOptions.class); + opts = ReadWriteUtils.readObjectFromYamlString("{}", JvmOptions.class); assertThat(opts.getXx(), is(nullValue())); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/KafkaJmxOptionsTest.java b/api/src/test/java/io/strimzi/api/kafka/model/KafkaJmxOptionsTest.java index 6fdbcc3d852..db7daddd10b 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/KafkaJmxOptionsTest.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/KafkaJmxOptionsTest.java @@ -6,7 +6,7 @@ import io.strimzi.api.kafka.model.common.jmx.KafkaJmxAuthenticationPassword; import io.strimzi.api.kafka.model.common.jmx.KafkaJmxOptions; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.junit.jupiter.api.Test; import java.util.Collections; @@ -20,7 +20,7 @@ public class KafkaJmxOptionsTest { @Test public void testAuthentication() { - KafkaJmxOptions opts = TestUtils.fromYamlString( + KafkaJmxOptions opts = ReadWriteUtils.readObjectFromYamlString( "authentication:\n" + " type: password", KafkaJmxOptions.class); @@ -31,7 +31,7 @@ public void testAuthentication() { @Test public void testNoJmxOpts() { - KafkaJmxOptions opts = TestUtils.fromYamlString("{}", KafkaJmxOptions.class); + KafkaJmxOptions opts = ReadWriteUtils.readObjectFromYamlString("{}", KafkaJmxOptions.class); assertThat(opts.getAuthentication(), is(nullValue())); assertThat(opts.getAdditionalProperties(), 
is(Collections.emptyMap())); diff --git a/api/src/test/java/io/strimzi/api/kafka/model/StructuralCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/StructuralCrdIT.java index 5b017e79251..6942aecaf42 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/StructuralCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/StructuralCrdIT.java @@ -10,6 +10,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.strimzi.api.annotations.ApiVersion; import io.strimzi.api.annotations.VersionRange; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -59,10 +60,10 @@ public void v1Beta2IsStructuralWithCrdV1() { private void assertApiVersionsAreStructural(String crdName, String crdPath, VersionRange shouldBeStructural) { try { - TestUtils.createCrd(client, crdName, crdPath); + CrdUtils.createCrd(client, crdName, crdPath); assertApiVersionsAreStructuralInApiextensionsV1(crdName, shouldBeStructural); } finally { - TestUtils.deleteCrd(client, crdName); + CrdUtils.deleteCrd(client, crdName); } } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/bridge/KafkaBridgeCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/bridge/KafkaBridgeCrdIT.java index eb7f1fa7e97..b4968bea861 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/bridge/KafkaBridgeCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/bridge/KafkaBridgeCrdIT.java @@ -8,6 +8,8 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -85,7 +87,7 @@ void testKafkaBridgeWithOpenTelemetryTracing() { void testLoadKafkaBridgeWithWrongTracingType() { Throwable 
exception = assertThrows( RuntimeException.class, - () -> TestUtils.fromYaml("KafkaBridge-with-wrong-tracing-type.yaml", KafkaBridge.class)); + () -> ReadWriteUtils.readObjectFromYamlFileInResources("KafkaBridge-with-wrong-tracing-type.yaml", KafkaBridge.class)); assertThat(exception.getMessage(), allOf( containsStringIgnoringCase("Could not resolve type id 'wrongtype'"), @@ -119,13 +121,13 @@ void testKafkaBridgeWithMetrics() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaBridge.CRD_NAME, TestUtils.CRD_KAFKA_BRIDGE); + CrdUtils.createCrd(client, KafkaBridge.CRD_NAME, CrdUtils.CRD_KAFKA_BRIDGE); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaBridge.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaBridge.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/connect/KafkaConnectCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/connect/KafkaConnectCrdIT.java index adeaa961788..c6cb3c2eb32 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/connect/KafkaConnectCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/connect/KafkaConnectCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -100,13 +101,13 @@ void testKafkaConnectWithInvalidExternalConfiguration() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaConnect.CRD_NAME, 
TestUtils.CRD_KAFKA_CONNECT); + CrdUtils.createCrd(client, KafkaConnect.CRD_NAME, CrdUtils.CRD_KAFKA_CONNECT); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaConnect.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaConnect.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/connector/KafkaConnectorCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/connector/KafkaConnectorCrdIT.java index 63eb6ae4b0b..d600ed8284d 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/connector/KafkaConnectorCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/connector/KafkaConnectorCrdIT.java @@ -7,6 +7,7 @@ import io.fabric8.kubernetes.client.ConfigBuilder; import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -34,13 +35,13 @@ void testKafkaConnectorScaling() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaConnector.CRD_NAME, TestUtils.CRD_KAFKA_CONNECTOR); + CrdUtils.createCrd(client, KafkaConnector.CRD_NAME, CrdUtils.CRD_KAFKA_CONNECTOR); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaConnector.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaConnector.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaCrdIT.java index ed81beb0d05..0331e0279b2 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaCrdIT.java +++ 
b/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -113,13 +114,13 @@ public void testKafkaWithInvalidZookeeperJmxAuthentication() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, Kafka.CRD_NAME, TestUtils.CRD_KAFKA); + CrdUtils.createCrd(client, Kafka.CRD_NAME, CrdUtils.CRD_KAFKA); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, Kafka.CRD_NAME); + CrdUtils.deleteCrd(client, Kafka.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaTest.java b/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaTest.java index ca7edd6f8a8..8e64e7abbfa 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaTest.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/kafka/KafkaTest.java @@ -12,7 +12,7 @@ import io.strimzi.api.kafka.model.kafka.listener.ListenerAddressBuilder; import io.strimzi.api.kafka.model.kafka.listener.ListenerStatus; import io.strimzi.api.kafka.model.kafka.listener.ListenerStatusBuilder; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.junit.jupiter.api.Test; import java.net.URISyntaxException; @@ -80,7 +80,7 @@ public void testCertificationAuthorityBuilderAndInts() throws URISyntaxException .build(); String path = Objects.requireNonNull(this.getClass().getResource("Kafka-ca-ints.yaml")).toURI().getPath(); - assertThat(TestUtils.toYamlString(kafka), 
is(TestUtils.getFileAsString(path))); + assertThat(ReadWriteUtils.writeObjectToYamlString(kafka), is(ReadWriteUtils.readFile(path))); } @Test @@ -121,12 +121,12 @@ public void testNewListenerSerialization() throws URISyntaxException { .build(); String path = Objects.requireNonNull(this.getClass().getResource("Kafka-new-listener-serialization.yaml")).toURI().getPath(); - assertThat(TestUtils.toYamlString(kafka), is(TestUtils.getFileAsString(path))); + assertThat(ReadWriteUtils.writeObjectToYamlString(kafka), is(ReadWriteUtils.readFile(path))); } @Test public void testListeners() { - Kafka model = TestUtils.fromYaml("Kafka" + ".yaml", Kafka.class); + Kafka model = ReadWriteUtils.readObjectFromYamlFileInResources("Kafka" + ".yaml", Kafka.class); assertThat(model.getSpec().getKafka().getListeners(), is(notNullValue())); assertThat(model.getSpec().getKafka().getListeners().size(), is(2)); @@ -188,12 +188,12 @@ public void testListenerTypeAndNameInStatus() throws ParseException, URISyntaxEx .build(); String path = Objects.requireNonNull(this.getClass().getResource("Kafka-listener-name-and-status.yaml")).toURI().getPath(); - assertThat(TestUtils.toYamlString(kafka), is(TestUtils.getFileAsString(path))); + assertThat(ReadWriteUtils.writeObjectToYamlString(kafka), is(ReadWriteUtils.readFile(path))); } @Test public void testListenersTypeAndName() { - Kafka model = TestUtils.fromYaml("Kafka-listener-name-and-status" + ".yaml", Kafka.class); + Kafka model = ReadWriteUtils.readObjectFromYamlFileInResources("Kafka-listener-name-and-status" + ".yaml", Kafka.class); assertThat(model.getStatus().getListeners(), is(notNullValue())); assertThat(model.getStatus().getListeners().size(), is(2)); diff --git a/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker/KafkaMirrorMakerCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker/KafkaMirrorMakerCrdIT.java index 6acb20a26c8..0618972ae79 100644 --- 
a/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker/KafkaMirrorMakerCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker/KafkaMirrorMakerCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -84,14 +85,14 @@ void testKafkaMirrorMakerWithCommitAndAbort() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaMirrorMaker.CRD_NAME, TestUtils.CRD_KAFKA_MIRROR_MAKER); + CrdUtils.createCrd(client, KafkaMirrorMaker.CRD_NAME, CrdUtils.CRD_KAFKA_MIRROR_MAKER); TestUtils.createNamespace(client, NAMESPACE); } @SuppressWarnings("deprecation") // Kafka Mirror Maker is deprecated @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaMirrorMaker.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaMirrorMaker.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker2/KafkaMirrorMaker2CrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker2/KafkaMirrorMaker2CrdIT.java index 9917e681799..3e6fdb38757 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker2/KafkaMirrorMaker2CrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/mirrormaker2/KafkaMirrorMaker2CrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ 
-100,13 +101,13 @@ public void testKafkaMirrorMaker2WithInvalidExternalConfiguration() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaMirrorMaker2.CRD_NAME, TestUtils.CRD_KAFKA_MIRROR_MAKER_2); + CrdUtils.createCrd(client, KafkaMirrorMaker2.CRD_NAME, CrdUtils.CRD_KAFKA_MIRROR_MAKER_2); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaMirrorMaker2.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaMirrorMaker2.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/nodepool/KafkaNodePoolCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/nodepool/KafkaNodePoolCrdIT.java index 83fe88cc2cb..9e9f87826e2 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/nodepool/KafkaNodePoolCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/nodepool/KafkaNodePoolCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -45,13 +46,13 @@ public void testKafkaWithInvalidRole() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaNodePool.CRD_NAME, TestUtils.CRD_KAFKA_NODE_POOL); + CrdUtils.createCrd(client, KafkaNodePool.CRD_NAME, CrdUtils.CRD_KAFKA_NODE_POOL); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaNodePool.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaNodePool.CRD_NAME); 
TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/podset/StrimziPodSetCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/podset/StrimziPodSetCrdIT.java index a8be0f0b0e4..511db06348c 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/podset/StrimziPodSetCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/podset/StrimziPodSetCrdIT.java @@ -11,6 +11,7 @@ import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -71,13 +72,13 @@ void testZeroReplicas() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, StrimziPodSet.CRD_NAME, TestUtils.CRD_STRIMZI_POD_SET); + CrdUtils.createCrd(client, StrimziPodSet.CRD_NAME, CrdUtils.CRD_STRIMZI_POD_SET); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, StrimziPodSet.CRD_NAME); + CrdUtils.deleteCrd(client, StrimziPodSet.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/rebalance/KafkaRebalanceCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/rebalance/KafkaRebalanceCrdIT.java index dfc3bf4b4e7..25e39525305 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/rebalance/KafkaRebalanceCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/rebalance/KafkaRebalanceCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import 
io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -78,13 +79,13 @@ void testKafkaRebalanceWrongMode() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaRebalance.CRD_NAME, TestUtils.CRD_KAFKA_REBALANCE); + CrdUtils.createCrd(client, KafkaRebalance.CRD_NAME, CrdUtils.CRD_KAFKA_REBALANCE); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaRebalance.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaRebalance.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/topic/KafkaTopicCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/topic/KafkaTopicCrdIT.java index 8b74c8f3fb9..24594f254a3 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/topic/KafkaTopicCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/topic/KafkaTopicCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -47,13 +48,13 @@ void testKafkaTopicMinimal() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaTopic.CRD_NAME, TestUtils.CRD_TOPIC); + CrdUtils.createCrd(client, KafkaTopic.CRD_NAME, CrdUtils.CRD_TOPIC); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaTopic.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaTopic.CRD_NAME); 
TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/api/src/test/java/io/strimzi/api/kafka/model/user/KafkaUserCrdIT.java b/api/src/test/java/io/strimzi/api/kafka/model/user/KafkaUserCrdIT.java index a9719a9113e..f787d03e07d 100644 --- a/api/src/test/java/io/strimzi/api/kafka/model/user/KafkaUserCrdIT.java +++ b/api/src/test/java/io/strimzi/api/kafka/model/user/KafkaUserCrdIT.java @@ -8,6 +8,7 @@ import io.fabric8.kubernetes.client.KubernetesClientBuilder; import io.fabric8.kubernetes.client.KubernetesClientException; import io.strimzi.api.kafka.model.AbstractCrdIT; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -47,13 +48,13 @@ void testKafkaUserMinimal() { @BeforeAll void setupEnvironment() { client = new KubernetesClientBuilder().withConfig(new ConfigBuilder().withNamespace(NAMESPACE).build()).build(); - TestUtils.createCrd(client, KafkaUser.CRD_NAME, TestUtils.CRD_KAFKA_USER); + CrdUtils.createCrd(client, KafkaUser.CRD_NAME, CrdUtils.CRD_KAFKA_USER); TestUtils.createNamespace(client, NAMESPACE); } @AfterAll void teardownEnvironment() { - TestUtils.deleteCrd(client, KafkaUser.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaUser.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); client.close(); } diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/ResourceUtils.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/ResourceUtils.java index d7810f743de..6de9e54ab88 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/ResourceUtils.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/ResourceUtils.java @@ -83,7 +83,6 @@ import io.strimzi.operator.common.auth.PemTrustSet; import io.strimzi.operator.common.model.Ca; import io.strimzi.operator.common.model.Labels; -import io.strimzi.test.TestUtils; import io.vertx.core.Future; import io.vertx.core.Vertx; import 
io.vertx.core.net.NetClientOptions; @@ -186,7 +185,7 @@ public static Kafka createKafka(String namespace, String name, int replicas, ObjectMeta meta = new ObjectMetaBuilder() .withNamespace(namespace) .withName(name) - .withLabels(Labels.fromMap(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")).toMap()) + .withLabels(Labels.fromMap(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")).toMap()) .build(); result.setMetadata(meta); @@ -245,7 +244,7 @@ public static KafkaConnect createEmptyKafkaConnect(String namespace, String name .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")) .withAnnotations(emptyMap()) .build()) @@ -262,7 +261,7 @@ public static KafkaBridge createEmptyKafkaBridge(String namespace, String name) .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")) .build()) .withNewSpec() @@ -280,7 +279,7 @@ public static KafkaMirrorMaker createEmptyKafkaMirrorMaker(String namespace, Str .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")) .build()) .withNewSpec() @@ -303,7 +302,7 @@ public static KafkaMirrorMaker createKafkaMirrorMaker(String namespace, String n .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", 
"cromulent")) .build()) .withNewSpec() @@ -327,7 +326,7 @@ public static KafkaBridge createKafkaBridge(String namespace, String name, Strin .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")) .build()) .withNewSpec() @@ -350,7 +349,7 @@ public static KafkaMirrorMaker2 createEmptyKafkaMirrorMaker2(String namespace, S .withMetadata(new ObjectMetaBuilder() .withName(name) .withNamespace(namespace) - .withLabels(TestUtils.map(Labels.KUBERNETES_DOMAIN + "part-of", "tests", + .withLabels(Map.of(Labels.KUBERNETES_DOMAIN + "part-of", "tests", "my-user-label", "cromulent")) .build()) .withNewSpec() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/AbstractModelTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/AbstractModelTest.java index 856578954cc..a3d5a4148bd 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/AbstractModelTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/AbstractModelTest.java @@ -11,7 +11,7 @@ import io.strimzi.api.kafka.model.kafka.Kafka; import io.strimzi.api.kafka.model.kafka.KafkaBuilder; import io.strimzi.operator.common.Reconciliation; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.annotations.ParallelSuite; import io.strimzi.test.annotations.ParallelTest; @@ -37,11 +37,11 @@ public Model(HasMetadata resource) { @ParallelTest public void testJvmPerformanceOptions() { - JvmOptions opts = TestUtils.fromYamlString("{}", JvmOptions.class); + JvmOptions opts = ReadWriteUtils.readObjectFromYamlString("{}", JvmOptions.class); assertThat(getPerformanceOptions(opts), is(nullValue())); - opts = TestUtils.fromYamlString("-XX:\n" + + opts = ReadWriteUtils.readObjectFromYamlString("-XX:\n" + " key1: 
value1\n" + " key2: true\n" + " key3: false\n" + diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/CruiseControlTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/CruiseControlTest.java index b4ca620959b..baafe83c4e7 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/CruiseControlTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/CruiseControlTest.java @@ -564,17 +564,17 @@ public void testGenerateService() { @SuppressWarnings("MethodLength") @ParallelTest public void testTemplate() { - Map depLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map depAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map depLabels = TestUtils.modifiableMap("l1", "v1", "l2", "v2"); + Map depAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = TestUtils.modifiableMap("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = TestUtils.modifiableMap("l5", "v5", "l6", "v6"); + Map svcAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map saLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map saAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map saLabels = Map.of("l7", "v7", "l8", "v8"); + Map saAnnotations = Map.of("a7", "v7", "a8", "v8"); Affinity affinity = new AffinityBuilder() .withNewNodeAffinity() @@ -1183,7 +1183,7 @@ private CruiseControl createCruiseControl(Kafka kafka, Set nodes, Map expectedLabels(String name) { - return TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, CLUSTER_NAME, + return TestUtils.modifiableMap(Labels.STRIMZI_CLUSTER_LABEL, CLUSTER_NAME, "my-user-label", "cromulent", Labels.STRIMZI_KIND_LABEL, Kafka.RESOURCE_KIND, Labels.STRIMZI_NAME_LABEL, name, diff --git 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityOperatorTest.java index 643a323362a..ae46af98cca 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityOperatorTest.java @@ -206,21 +206,21 @@ public void withAffinityAndTolerations() throws IOException { @ParallelTest public void testTemplate() { - Map depLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map depLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedDepLabels = new HashMap<>(depLabels); expectedDepLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map depAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map depAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); - Map saLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map saAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map saLabels = Map.of("l5", "v5", "l6", "v6"); + Map saAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map rLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map rAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map rLabels = Map.of("l7", "v7", "l8", "v8"); + Map rAnnotations = Map.of("a7", "v7", "a8", "v8"); Toleration toleration = new TolerationBuilder() .withEffect("NoSchedule") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityTopicOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityTopicOperatorTest.java index 19b90370ed8..187d2685422 100644 --- 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityTopicOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityTopicOperatorTest.java @@ -36,7 +36,6 @@ import static io.strimzi.operator.common.model.cruisecontrol.CruiseControlApiProperties.TOPIC_OPERATOR_PASSWORD_KEY; import static io.strimzi.operator.common.model.cruisecontrol.CruiseControlApiProperties.TOPIC_OPERATOR_USERNAME; import static io.strimzi.operator.common.model.cruisecontrol.CruiseControlApiProperties.TOPIC_OPERATOR_USERNAME_KEY; -import static io.strimzi.test.TestUtils.map; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; @@ -250,7 +249,7 @@ public void testGetContainers() { assertThat(container.getPorts().get(0).getContainerPort(), is(EntityTopicOperator.HEALTHCHECK_PORT)); assertThat(container.getPorts().get(0).getName(), is(EntityTopicOperator.HEALTHCHECK_PORT_NAME)); assertThat(container.getPorts().get(0).getProtocol(), is("TCP")); - assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(map( + assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(Map.of( EntityTopicOperator.TOPIC_OPERATOR_TMP_DIRECTORY_DEFAULT_VOLUME_NAME, VolumeUtils.STRIMZI_TMP_DIRECTORY_DEFAULT_MOUNT_PATH, "entity-topic-operator-metrics-and-logging", "/opt/topic-operator/custom-config/", EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_NAME, EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_MOUNT, @@ -318,7 +317,7 @@ public void testSetupWithCruiseControlEnabled() { assertThat(entityTopicOperator.getEnvVars(), is(expectedEnvVars)); Container container = entityTopicOperator.createContainer(null); - assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(map( + assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(Map.of( EntityTopicOperator.TOPIC_OPERATOR_TMP_DIRECTORY_DEFAULT_VOLUME_NAME, 
VolumeUtils.STRIMZI_TMP_DIRECTORY_DEFAULT_MOUNT_PATH, "entity-topic-operator-metrics-and-logging", "/opt/topic-operator/custom-config/", EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_NAME, EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_MOUNT, diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityUserOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityUserOperatorTest.java index 268645f7fc4..7ed948a9390 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityUserOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/EntityUserOperatorTest.java @@ -34,7 +34,6 @@ import java.util.List; import java.util.Map; -import static io.strimzi.test.TestUtils.map; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; @@ -230,7 +229,7 @@ public void testGetContainers() { assertThat(container.getPorts().get(0).getContainerPort(), is(EntityUserOperator.HEALTHCHECK_PORT)); assertThat(container.getPorts().get(0).getName(), is(EntityUserOperator.HEALTHCHECK_PORT_NAME)); assertThat(container.getPorts().get(0).getProtocol(), is("TCP")); - assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(map( + assertThat(EntityOperatorTest.volumeMounts(container.getVolumeMounts()), is(Map.of( EntityUserOperator.USER_OPERATOR_TMP_DIRECTORY_DEFAULT_VOLUME_NAME, VolumeUtils.STRIMZI_TMP_DIRECTORY_DEFAULT_MOUNT_PATH, "entity-user-operator-metrics-and-logging", "/opt/user-operator/custom-config/", EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_NAME, EntityOperator.TLS_SIDECAR_CA_CERTS_VOLUME_MOUNT, diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaBridgeClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaBridgeClusterTest.java index 258e2db3e14..8b0c0e29384 100644 --- 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaBridgeClusterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaBridgeClusterTest.java @@ -131,7 +131,7 @@ public class KafkaBridgeClusterTest { private final KafkaBridgeCluster kbc = KafkaBridgeCluster.fromCrd(Reconciliation.DUMMY_RECONCILIATION, resource, SHARED_ENV_PROVIDER); private Map expectedLabels(String name) { - return TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, + return TestUtils.modifiableMap(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, "my-user-label", "cromulent", Labels.STRIMZI_NAME_LABEL, name, Labels.STRIMZI_COMPONENT_TYPE_LABEL, KafkaBridgeCluster.COMPONENT_TYPE, @@ -425,24 +425,24 @@ public void testGenerateDeploymentWithPlainAuth() { @ParallelTest @SuppressWarnings({"checkstyle:methodlength"}) public void testTemplate() { - Map depLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map depLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedDepLabels = new HashMap<>(depLabels); expectedDepLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map depAnots = TestUtils.map("a1", "v1", "a2", "v2"); + Map depAnots = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnots = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnots = Map.of("a3", "v3", "a4", "v4"); - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnots = Map.of("a5", "v5", "a6", "v6"); - Map pdbLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map pdbAnots = TestUtils.map("a7", "v7", "a8", "v8"); + Map pdbLabels = Map.of("l7", "v7", "l8", "v8"); + Map pdbAnots = Map.of("a7", "v7", "a8", "v8"); - Map saLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map 
saAnots = TestUtils.map("a9", "v9", "a10", "v10"); + Map saLabels = Map.of("l9", "v9", "l10", "v10"); + Map saAnots = Map.of("a9", "v9", "a10", "v10"); Affinity affinity = new AffinityBuilder() .withNewNodeAffinity() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterListenersTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterListenersTest.java index 0b747f06790..c205c849d91 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterListenersTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterListenersTest.java @@ -142,29 +142,29 @@ private Map expectedBrokerSelectorLabels() { @SuppressWarnings({"checkstyle:MethodLength"}) @ParallelTest public void testListenersTemplate() { - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map hSvcLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map hSvcAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map hSvcLabels = Map.of("l7", "v7", "l8", "v8"); + Map hSvcAnnotations = Map.of("a7", "v7", "a8", "v8"); - Map exSvcLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map exSvcAnnotations = TestUtils.map("a9", "v9", "a10", "v10"); + Map exSvcLabels = Map.of("l9", "v9", "l10", "v10"); + Map exSvcAnnotations = Map.of("a9", "v9", "a10", "v10"); - Map perPodSvcLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map perPodSvcAnnotations = TestUtils.map("a11", "v11", "a12", "v12"); + Map perPodSvcLabels = Map.of("l11", "v11", "l12", "v12"); + Map perPodSvcAnnotations = Map.of("a11", "v11", "a12", "v12"); - Map exRouteLabels = TestUtils.map("l13", "v13", "l14", "v14"); - Map exRouteAnnotations = TestUtils.map("a13", "v13", "a14", "v14"); + Map exRouteLabels = Map.of("l13", "v13", "l14", "v14"); + Map 
exRouteAnnotations = Map.of("a13", "v13", "a14", "v14"); - Map perPodRouteLabels = TestUtils.map("l15", "v15", "l16", "v16"); - Map perPodRouteAnnotations = TestUtils.map("a15", "v15", "a16", "v16"); + Map perPodRouteLabels = Map.of("l15", "v15", "l16", "v16"); + Map perPodRouteAnnotations = Map.of("a15", "v15", "a16", "v16"); - Map exIngressLabels = TestUtils.map("l17", "v17", "l18", "v18"); - Map exIngressAnnotations = TestUtils.map("a17", "v17", "a18", "v18"); + Map exIngressLabels = Map.of("l17", "v17", "l18", "v18"); + Map exIngressAnnotations = Map.of("a17", "v17", "a18", "v18"); - Map perPodIngressLabels = TestUtils.map("l19", "v19", "l20", "v20"); - Map perPodIngressAnnotations = TestUtils.map("a19", "v19", "a20", "v20"); + Map perPodIngressLabels = Map.of("l19", "v19", "l20", "v20"); + Map perPodIngressAnnotations = Map.of("a19", "v19", "a20", "v20"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -338,39 +338,39 @@ public void testListenersTemplate() { @SuppressWarnings({"checkstyle:MethodLength"}) @ParallelTest public void testListenersTemplateFromKafkaAndNodePools() { - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map hSvcLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map hSvcAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map hSvcLabels = Map.of("l7", "v7", "l8", "v8"); + Map hSvcAnnotations = Map.of("a7", "v7", "a8", "v8"); - Map exSvcLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map exSvcAnnotations = TestUtils.map("a9", "v9", "a10", "v10"); + Map exSvcLabels = Map.of("l9", "v9", "l10", "v10"); + Map exSvcAnnotations = Map.of("a9", "v9", "a10", "v10"); - Map perPodSvcLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map perPodSvcAnnotations = TestUtils.map("a11", "v11", "a12", "v12"); + Map perPodSvcLabels = Map.of("l11", "v11", "l12", 
"v12"); + Map perPodSvcAnnotations = Map.of("a11", "v11", "a12", "v12"); - Map exRouteLabels = TestUtils.map("l13", "v13", "l14", "v14"); - Map exRouteAnnotations = TestUtils.map("a13", "v13", "a14", "v14"); + Map exRouteLabels = Map.of("l13", "v13", "l14", "v14"); + Map exRouteAnnotations = Map.of("a13", "v13", "a14", "v14"); - Map perPodRouteLabels = TestUtils.map("l15", "v15", "l16", "v16"); - Map perPodRouteAnnotations = TestUtils.map("a15", "v15", "a16", "v16"); + Map perPodRouteLabels = Map.of("l15", "v15", "l16", "v16"); + Map perPodRouteAnnotations = Map.of("a15", "v15", "a16", "v16"); - Map exIngressLabels = TestUtils.map("l17", "v17", "l18", "v18"); - Map exIngressAnnotations = TestUtils.map("a17", "v17", "a18", "v18"); + Map exIngressLabels = Map.of("l17", "v17", "l18", "v18"); + Map exIngressAnnotations = Map.of("a17", "v17", "a18", "v18"); - Map perPodIngressLabels = TestUtils.map("l19", "v19", "l20", "v20"); - Map perPodIngressAnnotations = TestUtils.map("a19", "v19", "a20", "v20"); + Map perPodIngressLabels = Map.of("l19", "v19", "l20", "v20"); + Map perPodIngressAnnotations = Map.of("a19", "v19", "a20", "v20"); // Node pool values - Map perPodSvcLabels2 = TestUtils.map("l21", "v21", "l22", "v22"); - Map perPodSvcAnnotations2 = TestUtils.map("a21", "v21", "a22", "v22"); + Map perPodSvcLabels2 = Map.of("l21", "v21", "l22", "v22"); + Map perPodSvcAnnotations2 = Map.of("a21", "v21", "a22", "v22"); - Map perPodRouteLabels2 = TestUtils.map("l25", "v25", "l26", "v26"); - Map perPodRouteAnnotations2 = TestUtils.map("a25", "v25", "a26", "v26"); + Map perPodRouteLabels2 = Map.of("l25", "v25", "l26", "v26"); + Map perPodRouteAnnotations2 = Map.of("a25", "v25", "a26", "v26"); - Map perPodIngressLabels2 = TestUtils.map("l29", "v29", "l30", "v30"); - Map perPodIngressAnnotations2 = TestUtils.map("a29", "v29", "a30", "v30"); + Map perPodIngressLabels2 = Map.of("l29", "v29", "l30", "v30"); + Map perPodIngressAnnotations2 = Map.of("a29", "v29", "a30", "v30"); Kafka 
kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -597,14 +597,14 @@ public void testListenersTemplateFromKafkaAndNodePools() { @ParallelTest public void testListenersTemplateFromNodePools() { // Node pool values - Map perPodSvcLabels2 = TestUtils.map("l21", "v21", "l22", "v22"); - Map perPodSvcAnnotations2 = TestUtils.map("a21", "v21", "a22", "v22"); + Map perPodSvcLabels2 = Map.of("l21", "v21", "l22", "v22"); + Map perPodSvcAnnotations2 = Map.of("a21", "v21", "a22", "v22"); - Map perPodRouteLabels2 = TestUtils.map("l25", "v25", "l26", "v26"); - Map perPodRouteAnnotations2 = TestUtils.map("a25", "v25", "a26", "v26"); + Map perPodRouteLabels2 = Map.of("l25", "v25", "l26", "v26"); + Map perPodRouteAnnotations2 = Map.of("a25", "v25", "a26", "v26"); - Map perPodIngressLabels2 = TestUtils.map("l29", "v29", "l30", "v30"); - Map perPodIngressAnnotations2 = TestUtils.map("a29", "v29", "a30", "v30"); + Map perPodIngressLabels2 = Map.of("l29", "v29", "l30", "v30"); + Map perPodIngressAnnotations2 = Map.of("a29", "v29", "a30", "v30"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterTest.java index cfb153cd5fa..17344fbef82 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterTest.java @@ -993,17 +993,17 @@ public void testContainerPorts() { @ParallelTest public void testAuxiliaryResourcesTemplate() { - Map pdbLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map pdbAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map pdbLabels = Map.of("l1", "v1", "l2", "v2"); + Map pdbAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map crbLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map crbAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map crbLabels = 
Map.of("l3", "v3", "l4", "v4"); + Map crbAnnotations = Map.of("a3", "v3", "a4", "v4"); - Map saLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map saAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map saLabels = Map.of("l5", "v5", "l6", "v6"); + Map saAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map jmxLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map jmxAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map jmxLabels = Map.of("l7", "v7", "l8", "v8"); + Map jmxAnnotations = Map.of("a7", "v7", "a8", "v8"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -1889,8 +1889,8 @@ public void testDefaultPodDisruptionBudget() { @ParallelTest public void testCustomizedPodDisruptionBudget() { - Map pdbLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map pdbAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map pdbLabels = Map.of("l1", "v1", "l2", "v2"); + Map pdbAnnotations = Map.of("a1", "v1", "a2", "v2"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -3774,11 +3774,11 @@ public void testPodSet() { @ParallelTest public void testCustomizedPodSet() { // Prepare various template values - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map spsAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsLabels = Map.of("l1", "v1", "l2", "v2"); + Map spsAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") @@ -4040,17 +4040,17 @@ public void testCustomizedPodSet() { @ParallelTest public void testCustomizedPodSetInKafkaAndNodePool() { // Prepare various template values - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map spsAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsLabels = Map.of("l1", "v1", 
"l2", "v2"); + Map spsAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); - Map poolSpsLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map poolSpsAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map poolSpsLabels = Map.of("l5", "v5", "l6", "v6"); + Map poolSpsAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map poolPodLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map poolPodAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map poolPodLabels = Map.of("l7", "v7", "l8", "v8"); + Map poolPodAnnotations = Map.of("a7", "v7", "a8", "v8"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") @@ -4435,11 +4435,11 @@ public void testCustomizedPodSetInKafkaAndNodePool() { @ParallelTest public void testCustomizedPodSetInNodePool() { // Prepare various template values - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map spsAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsLabels = Map.of("l1", "v1", "l2", "v2"); + Map spsAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterZooBasedTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterZooBasedTest.java index c92b62d75f4..78e4083c535 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterZooBasedTest.java +++ 
b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaClusterZooBasedTest.java @@ -124,7 +124,6 @@ import static io.strimzi.operator.cluster.model.jmx.JmxModel.JMX_PORT; import static io.strimzi.operator.cluster.model.jmx.JmxModel.JMX_PORT_NAME; -import static io.strimzi.test.TestUtils.set; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -555,32 +554,32 @@ public void testExposesJmxContainerPortWhenJmxEnabled() { @SuppressWarnings({"checkstyle:MethodLength"}) @ParallelTest public void testTemplate() { - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map hSvcLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map hSvcAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map hSvcLabels = Map.of("l7", "v7", "l8", "v8"); + Map hSvcAnnotations = Map.of("a7", "v7", "a8", "v8"); - Map exSvcLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map exSvcAnnotations = TestUtils.map("a9", "v9", "a10", "v10"); + Map exSvcLabels = Map.of("l9", "v9", "l10", "v10"); + Map exSvcAnnotations = Map.of("a9", "v9", "a10", "v10"); - Map perPodSvcLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map perPodSvcAnnotations = TestUtils.map("a11", "v11", "a12", "v12"); + Map perPodSvcLabels = Map.of("l11", "v11", "l12", "v12"); + Map perPodSvcAnnotations = Map.of("a11", "v11", "a12", "v12"); - Map exRouteLabels = TestUtils.map("l13", "v13", "l14", "v14"); - Map exRouteAnnotations = TestUtils.map("a13", "v13", "a14", "v14"); + Map exRouteLabels = Map.of("l13", "v13", "l14", "v14"); + Map exRouteAnnotations = Map.of("a13", "v13", "a14", "v14"); - Map perPodRouteLabels = TestUtils.map("l15", "v15", "l16", "v16"); - Map perPodRouteAnnotations = TestUtils.map("a15", "v15", "a16", "v16"); + Map perPodRouteLabels = 
Map.of("l15", "v15", "l16", "v16"); + Map perPodRouteAnnotations = Map.of("a15", "v15", "a16", "v16"); - Map pdbLabels = TestUtils.map("l17", "v17", "l18", "v18"); - Map pdbAnnotations = TestUtils.map("a17", "v17", "a18", "v18"); + Map pdbLabels = Map.of("l17", "v17", "l18", "v18"); + Map pdbAnnotations = Map.of("a17", "v17", "a18", "v18"); - Map crbLabels = TestUtils.map("l19", "v19", "l20", "v20"); - Map crbAnnotations = TestUtils.map("a19", "v19", "a20", "v20"); + Map crbLabels = Map.of("l19", "v19", "l20", "v20"); + Map crbAnnotations = Map.of("a19", "v19", "a20", "v20"); - Map saLabels = TestUtils.map("l21", "v21", "l22", "v22"); - Map saAnnotations = TestUtils.map("a21", "v21", "a22", "v22"); + Map saLabels = Map.of("l21", "v21", "l22", "v22"); + Map saAnnotations = Map.of("a21", "v21", "a22", "v22"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -1520,7 +1519,7 @@ public void testPvcsWithEmptyStorageSelector() { @ParallelTest public void testPvcsWithSetStorageSelector() { - Map selector = TestUtils.map("foo", "bar"); + Map selector = Map.of("foo", "bar"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() .editKafka() @@ -2401,13 +2400,13 @@ public void testGetExternalNodePortServiceAddressOverrideWithNonNullAdvertisedHo @ParallelTest public void testGenerateBrokerSecret() throws CertificateParsingException { Secret secret = generateBrokerSecret(null, emptyMap()); - assertThat(secret.getData().keySet(), is(set( + assertThat(secret.getData().keySet(), is(Set.of( "foo-kafka-0.crt", "foo-kafka-0.key", "foo-kafka-1.crt", "foo-kafka-1.key", "foo-kafka-2.crt", "foo-kafka-2.key"))); X509Certificate cert = Ca.cert(secret, "foo-kafka-0.crt"); assertThat(cert.getSubjectX500Principal().getName(), is("CN=foo-kafka,O=io.strimzi")); - assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(set( + assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(Set.of( asList(2, "foo-kafka-0.foo-kafka-brokers.test.svc.cluster.local"), asList(2, 
"foo-kafka-0.foo-kafka-brokers.test.svc"), asList(2, "foo-kafka-bootstrap"), @@ -2428,13 +2427,13 @@ public void testGenerateBrokerSecretExternal() throws CertificateParsingExceptio externalAddresses.put(2, Collections.singleton("123.10.125.132")); Secret secret = generateBrokerSecret(Collections.singleton("123.10.125.140"), externalAddresses); - assertThat(secret.getData().keySet(), is(set( + assertThat(secret.getData().keySet(), is(Set.of( "foo-kafka-0.crt", "foo-kafka-0.key", "foo-kafka-1.crt", "foo-kafka-1.key", "foo-kafka-2.crt", "foo-kafka-2.key"))); X509Certificate cert = Ca.cert(secret, "foo-kafka-0.crt"); assertThat(cert.getSubjectX500Principal().getName(), is("CN=foo-kafka,O=io.strimzi")); - assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(set( + assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(Set.of( asList(2, "foo-kafka-0.foo-kafka-brokers.test.svc.cluster.local"), asList(2, "foo-kafka-0.foo-kafka-brokers.test.svc"), asList(2, "foo-kafka-bootstrap"), @@ -2452,18 +2451,18 @@ public void testGenerateBrokerSecretExternal() throws CertificateParsingExceptio @ParallelTest public void testGenerateBrokerSecretExternalWithManyDNS() throws CertificateParsingException { Map> externalAddresses = new HashMap<>(); - externalAddresses.put(0, TestUtils.set("123.10.125.130", "my-broker-0")); - externalAddresses.put(1, TestUtils.set("123.10.125.131", "my-broker-1")); - externalAddresses.put(2, TestUtils.set("123.10.125.132", "my-broker-2")); + externalAddresses.put(0, Set.of("123.10.125.130", "my-broker-0")); + externalAddresses.put(1, Set.of("123.10.125.131", "my-broker-1")); + externalAddresses.put(2, Set.of("123.10.125.132", "my-broker-2")); - Secret secret = generateBrokerSecret(TestUtils.set("123.10.125.140", "my-bootstrap"), externalAddresses); - assertThat(secret.getData().keySet(), is(set( + Secret secret = generateBrokerSecret(Set.of("123.10.125.140", "my-bootstrap"), externalAddresses); + assertThat(secret.getData().keySet(), 
is(Set.of( "foo-kafka-0.crt", "foo-kafka-0.key", "foo-kafka-1.crt", "foo-kafka-1.key", "foo-kafka-2.crt", "foo-kafka-2.key"))); X509Certificate cert = Ca.cert(secret, "foo-kafka-0.crt"); assertThat(cert.getSubjectX500Principal().getName(), is("CN=foo-kafka,O=io.strimzi")); - assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(set( + assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(Set.of( asList(2, "foo-kafka-0.foo-kafka-brokers.test.svc.cluster.local"), asList(2, "foo-kafka-0.foo-kafka-brokers.test.svc"), asList(2, "foo-kafka-bootstrap"), @@ -2708,8 +2707,8 @@ public void testDefaultPodDisruptionBudget() { @ParallelTest public void testCustomizedPodDisruptionBudget() { - Map pdbLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map pdbAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map pdbLabels = Map.of("l1", "v1", "l2", "v2"); + Map pdbAnnotations = Map.of("a1", "v1", "a2", "v2"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -4277,11 +4276,11 @@ public void testPodSet() { @ParallelTest public void testCustomizedPodSet() { // Prepare various template values - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map spsAnnotations = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsLabels = Map.of("l1", "v1", "l2", "v2"); + Map spsAnnotations = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnotations = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnotations = Map.of("a3", "v3", "a4", "v4"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectBuildTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectBuildTest.java index c26818909c2..3c8c30b6810 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectBuildTest.java +++ 
b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectBuildTest.java @@ -197,7 +197,7 @@ public void testDeployment() { assertThat(pod.getMetadata().getName(), is(KafkaConnectResources.buildPodName(cluster))); assertThat(pod.getMetadata().getNamespace(), is(namespace)); - Map expectedDeploymentLabels = TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, + Map expectedDeploymentLabels = Map.of(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, Labels.STRIMZI_NAME_LABEL, KafkaConnectResources.buildPodName(cluster), Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND, Labels.STRIMZI_COMPONENT_TYPE_LABEL, KafkaConnectBuild.COMPONENT_TYPE, @@ -325,7 +325,7 @@ public void testBuildconfigWithDockerOutput() { assertThat(bc.getMetadata().getName(), is(KafkaConnectResources.buildConfigName(cluster))); assertThat(bc.getMetadata().getNamespace(), is(namespace)); - Map expectedDeploymentLabels = TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, + Map expectedDeploymentLabels = Map.of(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, Labels.STRIMZI_NAME_LABEL, KafkaConnectResources.buildPodName(cluster), Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND, Labels.STRIMZI_COMPONENT_TYPE_LABEL, KafkaConnectBuild.COMPONENT_TYPE, @@ -407,7 +407,7 @@ public void testBuildconfigWithImageStreamOutput() { assertThat(bc.getMetadata().getName(), is(KafkaConnectResources.buildConfigName(cluster))); assertThat(bc.getMetadata().getNamespace(), is(namespace)); - Map expectedDeploymentLabels = TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, + Map expectedDeploymentLabels = Map.of(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, Labels.STRIMZI_NAME_LABEL, KafkaConnectResources.buildPodName(cluster), Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND, Labels.STRIMZI_COMPONENT_TYPE_LABEL, KafkaConnectBuild.COMPONENT_TYPE, @@ -425,14 +425,14 @@ public void testBuildconfigWithImageStreamOutput() { @ParallelTest public void testTemplate() { - Map buildPodLabels 
= TestUtils.map("l1", "v1", "l2", "v2"); - Map buildPodAnnos = TestUtils.map("a1", "v1", "a2", "v2"); + Map buildPodLabels = Map.of("l1", "v1", "l2", "v2"); + Map buildPodAnnos = Map.of("a1", "v1", "a2", "v2"); - Map buildConfigLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map buildConfigAnnos = TestUtils.map("a3", "v3", "a4", "v4"); + Map buildConfigLabels = Map.of("l3", "v3", "l4", "v4"); + Map buildConfigAnnos = Map.of("a3", "v3", "a4", "v4"); - Map saLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map saAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map saLabels = Map.of("l5", "v5", "l6", "v6"); + Map saAnots = Map.of("a5", "v5", "a6", "v6"); SecretVolumeSource secret = new SecretVolumeSourceBuilder() .withSecretName("secret1") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectClusterTest.java index 0f4ad84214c..a680c27658b 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectClusterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaConnectClusterTest.java @@ -77,6 +77,7 @@ import io.strimzi.operator.common.model.OrderedProperties; import io.strimzi.platform.KubernetesVersion; import io.strimzi.plugin.security.profiles.impl.RestrictedPodSecurityProvider; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.annotations.ParallelSuite; import io.strimzi.test.annotations.ParallelTest; @@ -141,7 +142,7 @@ public class KafkaConnectClusterTest { private final KafkaConnect resource = new KafkaConnectBuilder(ResourceUtils.createEmptyKafkaConnect(namespace, clusterName)) .withNewSpec() - .withConfig((Map) TestUtils.fromYamlString(configurationJson, Map.class)) + .withConfig((Map) ReadWriteUtils.readObjectFromYamlString(configurationJson, Map.class)) .withImage(image) .withReplicas(replicas) .withReadinessProbe(new 
Probe(healthDelay, healthTimeout)) @@ -169,7 +170,7 @@ private void checkMetricsConfigMap(ConfigMap metricsCm) { } private Map expectedLabels(String name) { - return TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.clusterName, + return TestUtils.modifiableMap(Labels.STRIMZI_CLUSTER_LABEL, this.clusterName, "my-user-label", "cromulent", Labels.STRIMZI_NAME_LABEL, name, Labels.STRIMZI_KIND_LABEL, KafkaConnect.RESOURCE_KIND, @@ -716,27 +717,27 @@ public void testPodSet() { @ParallelTest @SuppressWarnings({"checkstyle:methodlength"}) public void testTemplate() { - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map spsLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedDepLabels = new HashMap<>(spsLabels); expectedDepLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map spsAnnos = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsAnnos = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnots = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnots = Map.of("a3", "v3", "a4", "v4"); - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnots = Map.of("a5", "v5", "a6", "v6"); - Map pdbLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map pdbAnots = TestUtils.map("a7", "v7", "a8", "v8"); + Map pdbLabels = Map.of("l7", "v7", "l8", "v8"); + Map pdbAnots = Map.of("a7", "v7", "a8", "v8"); - Map crbLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map crbAnots = TestUtils.map("a9", "v9", "a10", "v10"); + Map crbLabels = Map.of("l9", "v9", "l10", "v10"); + Map crbAnots = Map.of("a9", "v9", "a10", "v10"); - Map saLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map saAnots = TestUtils.map("a11", "v11", "a12", "v12"); + Map saLabels = Map.of("l11", 
"v11", "l12", "v12"); + Map saAnots = Map.of("a11", "v11", "a12", "v12"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaExporterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaExporterTest.java index 650bbcde503..0d766108330 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaExporterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaExporterTest.java @@ -298,18 +298,18 @@ public void testExporterNotDeployed() { @ParallelTest public void testTemplate() { - Map depLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map depLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedDepLabels = new HashMap<>(depLabels); expectedDepLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map depAnots = TestUtils.map("a1", "v1", "a2", "v2"); + Map depAnots = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnots = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnots = Map.of("a3", "v3", "a4", "v4"); - Map saLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map saAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map saLabels = Map.of("l5", "v5", "l6", "v6"); + Map saAnots = Map.of("a5", "v5", "a6", "v6"); Affinity affinity = new AffinityBuilder() .withNewNodeAffinity() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMaker2ClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMaker2ClusterTest.java index 0e6a3ade087..34c30c04db7 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMaker2ClusterTest.java +++ 
b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMaker2ClusterTest.java @@ -72,6 +72,7 @@ import io.strimzi.operator.common.model.OrderedProperties; import io.strimzi.platform.KubernetesVersion; import io.strimzi.plugin.security.profiles.impl.RestrictedPodSecurityProvider; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.annotations.ParallelSuite; import io.strimzi.test.annotations.ParallelTest; @@ -132,7 +133,7 @@ public class KafkaMirrorMaker2ClusterTest { private final KafkaMirrorMaker2ClusterSpec targetCluster = new KafkaMirrorMaker2ClusterSpecBuilder() .withAlias(targetClusterAlias) .withBootstrapServers(bootstrapServers) - .withConfig((Map) TestUtils.fromYamlString(configurationJson, Map.class)) + .withConfig((Map) ReadWriteUtils.readObjectFromYamlString(configurationJson, Map.class)) .build(); private final KafkaMirrorMaker2 resource = new KafkaMirrorMaker2Builder(ResourceUtils.createEmptyKafkaMirrorMaker2(namespace, clusterName)) @@ -171,7 +172,7 @@ private void checkMetricsConfigMap(ConfigMap metricsCm) { } private Map expectedLabels(String name) { - return TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.clusterName, + return Map.of(Labels.STRIMZI_CLUSTER_LABEL, this.clusterName, "my-user-label", "cromulent", Labels.STRIMZI_NAME_LABEL, name, Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker2.RESOURCE_KIND, @@ -901,27 +902,27 @@ public void testPodSetWithPlainAuthAndTLSSameSecret() { @ParallelTest @SuppressWarnings({"checkstyle:methodlength"}) public void testTemplate() { - Map podSetLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map podSetLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedPodSetLabels = new HashMap<>(podSetLabels); expectedPodSetLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map podSetAnnos = TestUtils.map("a1", "v1", "a2", "v2"); + Map podSetAnnos = 
Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnots = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnots = Map.of("a3", "v3", "a4", "v4"); - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnots = Map.of("a5", "v5", "a6", "v6"); - Map pdbLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map pdbAnots = TestUtils.map("a7", "v7", "a8", "v8"); + Map pdbLabels = Map.of("l7", "v7", "l8", "v8"); + Map pdbAnots = Map.of("a7", "v7", "a8", "v8"); - Map crbLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map crbAnots = TestUtils.map("a9", "v9", "a10", "v10"); + Map crbLabels = Map.of("l9", "v9", "l10", "v10"); + Map crbAnots = Map.of("a9", "v9", "a10", "v10"); - Map saLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map saAnots = TestUtils.map("a11", "v11", "a12", "v12"); + Map saLabels = Map.of("l11", "v11", "l12", "v12"); + Map saAnots = Map.of("a11", "v11", "a12", "v12"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMakerClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMakerClusterTest.java index 722dc066059..540b17f310f 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMakerClusterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/KafkaMirrorMakerClusterTest.java @@ -50,6 +50,7 @@ import io.strimzi.operator.common.model.Labels; import io.strimzi.platform.KubernetesVersion; import io.strimzi.plugin.security.profiles.impl.RestrictedPodSecurityProvider; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.annotations.ParallelSuite; import 
io.strimzi.test.annotations.ParallelTest; @@ -104,7 +105,7 @@ public class KafkaMirrorMakerClusterTest { private final KafkaMirrorMakerProducerSpec producer = new KafkaMirrorMakerProducerSpecBuilder() .withBootstrapServers(producerBootstrapServers) .withAbortOnSendFailure(abortOnSendFailure) - .withConfig((Map) TestUtils.fromYamlString(producerConfigurationJson, Map.class)) + .withConfig((Map) ReadWriteUtils.readObjectFromYamlString(producerConfigurationJson, Map.class)) .build(); private final KafkaMirrorMakerConsumerSpec consumer = new KafkaMirrorMakerConsumerSpecBuilder() @@ -112,7 +113,7 @@ public class KafkaMirrorMakerClusterTest { .withGroupId(groupId) .withNumStreams(numStreams) .withOffsetCommitInterval(offsetCommitInterval) - .withConfig((Map) TestUtils.fromYamlString(consumerConfigurationJson, Map.class)) + .withConfig((Map) ReadWriteUtils.readObjectFromYamlString(consumerConfigurationJson, Map.class)) .build(); private final KafkaMirrorMaker resource = new KafkaMirrorMakerBuilder(ResourceUtils.createEmptyKafkaMirrorMaker(namespace, cluster)) @@ -144,7 +145,7 @@ private void checkMetricsConfigMap(ConfigMap metricsCm) { } private Map expectedLabels(String name) { - return TestUtils.map(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, + return Map.of(Labels.STRIMZI_CLUSTER_LABEL, this.cluster, "my-user-label", "cromulent", Labels.STRIMZI_KIND_LABEL, KafkaMirrorMaker.RESOURCE_KIND, Labels.STRIMZI_NAME_LABEL, name, @@ -563,21 +564,21 @@ public void testGenerateDeploymentWithPlain() { @ParallelTest public void testTemplate() { - Map depLabels = TestUtils.map("l1", "v1", "l2", "v2", + Map depLabels = Map.of("l1", "v1", "l2", "v2", Labels.KUBERNETES_PART_OF_LABEL, "custom-part", Labels.KUBERNETES_MANAGED_BY_LABEL, "custom-managed-by"); Map expectedDepLabels = new HashMap<>(depLabels); expectedDepLabels.remove(Labels.KUBERNETES_MANAGED_BY_LABEL); - Map depAnots = TestUtils.map("a1", "v1", "a2", "v2"); + Map depAnots = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = 
TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnots = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnots = Map.of("a3", "v3", "a4", "v4"); - Map pdbLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map pdbAnots = TestUtils.map("a5", "v5", "a6", "v6"); + Map pdbLabels = Map.of("l5", "v5", "l6", "v6"); + Map pdbAnots = Map.of("a5", "v5", "a6", "v6"); - Map saLabels = TestUtils.map("l7", "v7", "l8", "v8"); - Map saAnots = TestUtils.map("a7", "v7", "a8", "v8"); + Map saLabels = Map.of("l7", "v7", "l8", "v8"); + Map saAnots = Map.of("a7", "v7", "a8", "v8"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterPodSetTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterPodSetTest.java index 3ed73965340..86ddc7e8b4a 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterPodSetTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterPodSetTest.java @@ -142,11 +142,11 @@ public void testPodSet() { @ParallelTest public void testCustomizedPodSet() { // Prepare various template values - Map spsLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map spsAnnos = TestUtils.map("a1", "v1", "a2", "v2"); + Map spsLabels = Map.of("l1", "v1", "l2", "v2"); + Map spsAnnos = Map.of("a1", "v1", "a2", "v2"); - Map podLabels = TestUtils.map("l3", "v3", "l4", "v4"); - Map podAnnos = TestUtils.map("a3", "v3", "a4", "v4"); + Map podLabels = Map.of("l3", "v3", "l4", "v4"); + Map podAnnos = Map.of("a3", "v3", "a4", "v4"); HostAlias hostAlias1 = new HostAliasBuilder() .withHostnames("my-host-1", "my-host-2") diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterTest.java 
index cd40a4bc29f..6fa178eb92a 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/model/ZookeeperClusterTest.java @@ -64,10 +64,10 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import static io.strimzi.operator.cluster.model.jmx.JmxModel.JMX_PORT; import static io.strimzi.operator.cluster.model.jmx.JmxModel.JMX_PORT_NAME; -import static io.strimzi.test.TestUtils.set; import static java.util.Arrays.asList; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -405,13 +405,13 @@ public void withTolerations() throws IOException { @ParallelTest public void testGenerateBrokerSecret() throws CertificateParsingException { Secret secret = generateCertificatesSecret(); - assertThat(secret.getData().keySet(), is(set( + assertThat(secret.getData().keySet(), is(Set.of( "foo-zookeeper-0.crt", "foo-zookeeper-0.key", "foo-zookeeper-1.crt", "foo-zookeeper-1.key", "foo-zookeeper-2.crt", "foo-zookeeper-2.key"))); X509Certificate cert = Ca.cert(secret, "foo-zookeeper-0.crt"); assertThat(cert.getSubjectX500Principal().getName(), is("CN=foo-zookeeper,O=io.strimzi")); - assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(set( + assertThat(new HashSet(cert.getSubjectAlternativeNames()), is(Set.of( asList(2, "foo-zookeeper-0"), asList(2, "foo-zookeeper-0.foo-zookeeper-nodes.test.svc"), asList(2, "foo-zookeeper-0.foo-zookeeper-nodes.test.svc.cluster.local"), @@ -427,17 +427,17 @@ public void testGenerateBrokerSecret() throws CertificateParsingException { @ParallelTest public void testTemplate() { - Map svcLabels = TestUtils.map("l5", "v5", "l6", "v6"); - Map svcAnnotations = TestUtils.map("a5", "v5", "a6", "v6"); + Map svcLabels = Map.of("l5", "v5", "l6", "v6"); + Map svcAnnotations = Map.of("a5", "v5", "a6", "v6"); - Map hSvcLabels = TestUtils.map("l7", "v7", "l8", 
"v8"); - Map hSvcAnnotations = TestUtils.map("a7", "v7", "a8", "v8"); + Map hSvcLabels = Map.of("l7", "v7", "l8", "v8"); + Map hSvcAnnotations = Map.of("a7", "v7", "a8", "v8"); - Map pdbLabels = TestUtils.map("l9", "v9", "l10", "v10"); - Map pdbAnnotations = TestUtils.map("a9", "v9", "a10", "v10"); + Map pdbLabels = Map.of("l9", "v9", "l10", "v10"); + Map pdbAnnotations = Map.of("a9", "v9", "a10", "v10"); - Map saLabels = TestUtils.map("l11", "v11", "l12", "v12"); - Map saAnnotations = TestUtils.map("a11", "v11", "a12", "v12"); + Map saLabels = Map.of("l11", "v11", "l12", "v12"); + Map saAnnotations = Map.of("a11", "v11", "a12", "v12"); Kafka kafkaAssembly = new KafkaBuilder(KAFKA) .editSpec() @@ -1009,8 +1009,8 @@ public void testDefaultPodDisruptionBudget() { @ParallelTest public void testCustomizedPodDisruptionBudget() { - Map pdbLabels = TestUtils.map("l1", "v1", "l2", "v2"); - Map pdbAnnos = TestUtils.map("a1", "v1", "a2", "v2"); + Map pdbLabels = Map.of("l1", "v1", "l2", "v2"); + Map pdbAnnos = Map.of("a1", "v1", "a2", "v2"); Kafka kafka = new KafkaBuilder(KAFKA) .editSpec() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/CaReconcilerTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/CaReconcilerTest.java index 97116eb60c0..7331b7ddda2 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/CaReconcilerTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/CaReconcilerTest.java @@ -45,6 +45,7 @@ import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.model.PasswordGenerator; import io.strimzi.operator.common.operator.resource.ReconcileResult; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -86,7 +87,6 @@ import static io.strimzi.operator.common.model.Ca.CA_KEY; import static 
io.strimzi.operator.common.model.Ca.CA_STORE; import static io.strimzi.operator.common.model.Ca.CA_STORE_PASSWORD; -import static io.strimzi.test.TestUtils.set; import static java.util.Collections.singleton; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; @@ -301,12 +301,12 @@ public void testReconcileCasGeneratesCertsInitially(Vertx vertx, VertxTestContex .onComplete(context.succeeding(c -> context.verify(() -> { assertThat(c.getAllValues(), hasSize(4)); - assertThat(c.getAllValues().get(0).getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(c.getAllValues().get(0).getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(isCertInTrustStore(CA_CRT, c.getAllValues().get(0).getData()), is(true)); assertThat(c.getAllValues().get(1).getData().keySet(), is(singleton(CA_KEY))); - assertThat(c.getAllValues().get(2).getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(c.getAllValues().get(2).getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(isCertInTrustStore(CA_CRT, c.getAllValues().get(2).getData()), is(true)); assertThat(c.getAllValues().get(3).getData().keySet(), is(singleton(CA_KEY))); @@ -351,7 +351,7 @@ private void assertNoCertsGetGeneratedOutsideRenewalPeriod(Vertx vertx, VertxTes Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); Map clusterCaCertData = initialClusterCaCertSecret.getData(); - assertThat(clusterCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clusterCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(clusterCaCertData.get(CA_CRT), is(notNullValue())); assertThat(clusterCaCertData.get(CA_STORE), is(notNullValue())); assertThat(clusterCaCertData.get(CA_STORE_PASSWORD), is(notNullValue())); @@ -366,7 +366,7 @@ private void assertNoCertsGetGeneratedOutsideRenewalPeriod(Vertx vertx, VertxTes Secret initialClientsCaCertSecret = 
clientsCaSecrets.get(1); Map clientsCaCertData = initialClientsCaCertSecret.getData(); - assertThat(clientsCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clientsCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(clientsCaCertData.get(CA_CRT), is(notNullValue())); assertThat(clientsCaCertData.get(CA_STORE), is(notNullValue())); assertThat(clientsCaCertData.get(CA_STORE_PASSWORD), is(notNullValue())); @@ -386,18 +386,18 @@ private void assertNoCertsGetGeneratedOutsideRenewalPeriod(Vertx vertx, VertxTes reconcileCa(vertx, certificateAuthority, certificateAuthority) .onComplete(context.succeeding(c -> context.verify(() -> { - assertThat(c.getAllValues().get(0).getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(c.getAllValues().get(0).getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(c.getAllValues().get(0).getData().get(CA_CRT), is(initialClusterCaCertSecret.getData().get(CA_CRT))); assertThat(x509Certificate(initialClusterCaCertSecret.getData().get(CA_CRT)), is(getCertificateFromTrustStore(CA_CRT, c.getAllValues().get(0).getData()))); - assertThat(c.getAllValues().get(1).getData().keySet(), is(set(CA_KEY))); + assertThat(c.getAllValues().get(1).getData().keySet(), is(Set.of(CA_KEY))); assertThat(c.getAllValues().get(1).getData().get(CA_KEY), is(initialClusterCaKeySecret.getData().get(CA_KEY))); - assertThat(c.getAllValues().get(2).getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(c.getAllValues().get(2).getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(c.getAllValues().get(2).getData().get(CA_CRT), is(initialClientsCaCertSecret.getData().get(CA_CRT))); assertThat(x509Certificate(initialClientsCaCertSecret.getData().get(CA_CRT)), is(getCertificateFromTrustStore(CA_CRT, c.getAllValues().get(2).getData()))); - assertThat(c.getAllValues().get(3).getData().keySet(), is(set(CA_KEY))); + 
assertThat(c.getAllValues().get(3).getData().keySet(), is(Set.of(CA_KEY))); assertThat(c.getAllValues().get(3).getData().get(CA_KEY), is(initialClientsCaKeySecret.getData().get(CA_KEY))); async.flag(); }))); @@ -437,7 +437,7 @@ public void testGenerateTruststoreFromOldSecrets(Vertx vertx, VertxTestContext c assertThat(c.getAllValues(), hasSize(4)); Map clusterCaCertData = c.getAllValues().get(0).getData(); - assertThat(clusterCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clusterCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClusterCaCertStore = getCertificateFromTrustStore(CA_CRT, clusterCaCertData); String newClusterCaCert = clusterCaCertData.remove(CA_CRT); @@ -458,7 +458,7 @@ public void testGenerateTruststoreFromOldSecrets(Vertx vertx, VertxTestContext c assertThat(newClusterCaKey, is(initialClusterCaKeySecret.getData().get(CA_KEY))); Map clientsCaCertData = c.getAllValues().get(2).getData(); - assertThat(clientsCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clientsCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClientsCaCertStore = getCertificateFromTrustStore(CA_CRT, clientsCaCertData); String newClientsCaCert = clientsCaCertData.remove(CA_CRT); @@ -490,7 +490,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, VertxTe List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), 
is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -501,7 +501,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, VertxTe List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -520,7 +520,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, VertxTe assertThat(c.getAllValues(), hasSize(4)); Map clusterCaCertData = c.getAllValues().get(0).getData(); - assertThat(clusterCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clusterCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClusterCaCertStore = getCertificateFromTrustStore(CA_CRT, clusterCaCertData); String newClusterCaCert = clusterCaCertData.remove(CA_CRT); String newClusterCaCertStore = clusterCaCertData.remove(CA_STORE); @@ -540,7 +540,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, VertxTe assertThat(newClusterCaKey, is(initialClusterCaKeySecret.getData().get(CA_KEY))); Map clientsCaCertData = c.getAllValues().get(2).getData(); - assertThat(clientsCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clientsCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClientsCaCertStore = getCertificateFromTrustStore(CA_CRT, 
clientsCaCertData); String newClientsCaCert = clientsCaCertData.remove(CA_CRT); @@ -584,7 +584,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoOutsideOfMaintenanceW List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -595,7 +595,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoOutsideOfMaintenanceW List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -614,7 +614,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoOutsideOfMaintenanceW assertThat(c.getAllValues(), hasSize(4)); Map clusterCaCertData = c.getAllValues().get(0).getData(); - assertThat(clusterCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clusterCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); 
X509Certificate newX509ClusterCaCertStore = getCertificateFromTrustStore(CA_CRT, clusterCaCertData); assertThat(c.getAllValues().get(0).getMetadata().getAnnotations().get(Ca.ANNO_STRIMZI_IO_CA_CERT_GENERATION), is("0")); String newClusterCaCert = clusterCaCertData.remove(CA_CRT); @@ -636,7 +636,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoOutsideOfMaintenanceW assertThat(newClusterCaKey, is(initialClusterCaKeySecret.getData().get(CA_KEY))); Map clientsCaCertData = c.getAllValues().get(2).getData(); - assertThat(clientsCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clientsCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClientsCaCertStore = getCertificateFromTrustStore(CA_CRT, clientsCaCertData); assertThat(c.getAllValues().get(2).getMetadata().getAnnotations().get(Ca.ANNO_STRIMZI_IO_CA_CERT_GENERATION), is("0")); String newClientsCaCert = clientsCaCertData.remove(CA_CRT); @@ -681,7 +681,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoWithinMaintenanceWind List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -692,7 +692,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoWithinMaintenanceWind List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret 
initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -711,7 +711,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoWithinMaintenanceWind assertThat(c.getAllValues().size(), is(4)); Map clusterCaCertData = c.getAllValues().get(0).getData(); - assertThat(clusterCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clusterCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClusterCaCertStore = getCertificateFromTrustStore(CA_CRT, clusterCaCertData); assertThat(c.getAllValues().get(0).getMetadata().getAnnotations(), hasEntry(Ca.ANNO_STRIMZI_IO_CA_CERT_GENERATION, "1")); String newClusterCaCert = clusterCaCertData.remove(CA_CRT); @@ -733,7 +733,7 @@ public void testNewCertsGetGeneratedWhenInRenewalPeriodAutoWithinMaintenanceWind assertThat(newClusterCaKey, is(initialClusterCaKeySecret.getData().get(CA_KEY))); Map clientsCaCertData = c.getAllValues().get(2).getData(); - assertThat(clientsCaCertData.keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(clientsCaCertData.keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); X509Certificate newX509ClientsCaCertStore = getCertificateFromTrustStore(CA_CRT, clientsCaCertData); assertThat(c.getAllValues().get(2).getMetadata().getAnnotations(), hasEntry(Ca.ANNO_STRIMZI_IO_CA_CERT_GENERATION, "1")); String newClientsCaCert = clientsCaCertData.remove(CA_CRT); @@ -770,7 +770,7 @@ public void testNewKeyGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, 
VertxTest List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -781,7 +781,7 @@ public void testNewKeyGetGeneratedWhenInRenewalPeriodAuto(Vertx vertx, VertxTest List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -881,7 +881,7 @@ public void testNewKeyGeneratedWhenInRenewalPeriodAutoOutsideOfTimeWindow(Vertx List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); 
assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -892,7 +892,7 @@ public void testNewKeyGeneratedWhenInRenewalPeriodAutoOutsideOfTimeWindow(Vertx List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -977,7 +977,7 @@ public void testNewKeyGeneratedWhenInRenewalPeriodAutoWithinTimeWindow(Vertx ver List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -988,7 +988,7 @@ public void testNewKeyGeneratedWhenInRenewalPeriodAutoWithinTimeWindow(Vertx ver List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret 
initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -1088,14 +1088,14 @@ public void testExpiredCertsGetRemovedAuto(Vertx vertx, VertxTestContext context List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); assertThat(isCertInTrustStore(CA_CRT, initialClusterCaCertSecret.getData()), is(true)); // add an expired certificate to the secret ... 
- String clusterCert = Objects.requireNonNull(TestUtils.readResource(getClass(), "cluster-ca.crt")); + String clusterCert = Objects.requireNonNull(ReadWriteUtils.readFileFromResources(getClass(), "cluster-ca.crt")); String encodedClusterCert = Base64.getEncoder().encodeToString(clusterCert.getBytes(StandardCharsets.UTF_8)); initialClusterCaCertSecret.getData().put("ca-2018-07-01T09-00-00.crt", encodedClusterCert); @@ -1116,14 +1116,14 @@ public void testExpiredCertsGetRemovedAuto(Vertx vertx, VertxTestContext context List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); assertThat(isCertInTrustStore(CA_CRT, initialClientsCaCertSecret.getData()), is(true)); // add an expired certificate to the secret ... 
- String clientCert = Objects.requireNonNull(TestUtils.readResource(getClass(), "clients-ca.crt")); + String clientCert = Objects.requireNonNull(ReadWriteUtils.readFileFromResources(getClass(), "clients-ca.crt")); String encodedClientCert = Base64.getEncoder().encodeToString(clientCert.getBytes(StandardCharsets.UTF_8)); initialClientsCaCertSecret.getData().put("ca-2018-07-01T09-00-00.crt", encodedClientCert); @@ -1187,7 +1187,7 @@ public void testCustomCertsNotReconciled(Vertx vertx, VertxTestContext context) List clusterCaSecrets = initialClusterCaSecrets(certificateAuthority); Secret initialClusterCaKeySecret = clusterCaSecrets.get(0); Secret initialClusterCaCertSecret = clusterCaSecrets.get(1); - assertThat(initialClusterCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClusterCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClusterCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClusterCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -1198,7 +1198,7 @@ public void testCustomCertsNotReconciled(Vertx vertx, VertxTestContext context) List clientsCaSecrets = initialClientsCaSecrets(certificateAuthority); Secret initialClientsCaKeySecret = clientsCaSecrets.get(0); Secret initialClientsCaCertSecret = clientsCaSecrets.get(1); - assertThat(initialClientsCaCertSecret.getData().keySet(), is(set(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); + assertThat(initialClientsCaCertSecret.getData().keySet(), is(Set.of(CA_CRT, CA_STORE, CA_STORE_PASSWORD))); assertThat(initialClientsCaCertSecret.getData().get(CA_CRT), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE), is(notNullValue())); assertThat(initialClientsCaCertSecret.getData().get(CA_STORE_PASSWORD), is(notNullValue())); @@ -1356,8 +1356,8 @@ public void 
testClusterCASecretsWithoutOwnerReference(Vertx vertx, VertxTestCont assertThat(clientsCaCertSecret.getMetadata().getOwnerReferences(), hasSize(1)); assertThat(clientsCaKeySecret.getMetadata().getOwnerReferences(), hasSize(1)); - assertThat(clientsCaCertSecret.getMetadata().getOwnerReferences().get(0), is(ownerReference)); - assertThat(clientsCaKeySecret.getMetadata().getOwnerReferences().get(0), is(ownerReference)); + TestUtils.checkOwnerReference(clientsCaCertSecret, kafka); + TestUtils.checkOwnerReference(clientsCaKeySecret, kafka); async.flag(); }))); @@ -1422,8 +1422,8 @@ public void testClientsCASecretsWithoutOwnerReference(Vertx vertx, VertxTestCont assertThat(clientsCaCertSecret.getMetadata().getOwnerReferences(), hasSize(0)); assertThat(clientsCaKeySecret.getMetadata().getOwnerReferences(), hasSize(0)); - assertThat(clusterCaCertSecret.getMetadata().getOwnerReferences().get(0), is(ownerReference)); - assertThat(clusterCaKeySecret.getMetadata().getOwnerReferences().get(0), is(ownerReference)); + TestUtils.checkOwnerReference(clusterCaCertSecret, kafka); + TestUtils.checkOwnerReference(clusterCaKeySecret, kafka); async.flag(); }))); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/ConnectorMockTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/ConnectorMockTest.java index b52d83e060b..1cf0c50fece 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/ConnectorMockTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/ConnectorMockTest.java @@ -47,7 +47,7 @@ import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.model.OrderedProperties; import io.strimzi.platform.KubernetesVersion; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.mockkube3.MockKube3; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -78,9 +78,7 @@ import 
java.util.function.Predicate; import java.util.stream.Collectors; -import static io.strimzi.test.TestUtils.map; import static io.strimzi.test.TestUtils.waitFor; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -187,7 +185,7 @@ public void beforeEach(TestInfo testInfo, VertxTestContext testContext) { setupMockConnectAPI(); - ClusterOperatorConfig config = ClusterOperatorConfig.buildFromMap(map( + ClusterOperatorConfig config = ClusterOperatorConfig.buildFromMap(Map.of( ClusterOperatorConfig.STRIMZI_KAFKA_IMAGES, KafkaVersionTestUtils.getKafkaImagesEnvVarString(), ClusterOperatorConfig.STRIMZI_KAFKA_CONNECT_IMAGES, KafkaVersionTestUtils.getKafkaConnectImagesEnvVarString(), ClusterOperatorConfig.STRIMZI_KAFKA_MIRROR_MAKER_2_IMAGES, KafkaVersionTestUtils.getKafkaMirrorMaker2ImagesEnvVarString(), @@ -268,10 +266,10 @@ private void setupMockConnectAPI() { if (connectorStatus == null) { return Future.failedFuture(new ConnectRestException("GET", String.format("/connectors/%s", connectorName), 404, "Not Found", "")); } - return Future.succeededFuture(TestUtils.map( + return Future.succeededFuture(Map.of( "name", connectorName, "config", connectorStatus.config, - "tasks", emptyMap())); + "tasks", Map.of())); }); when(api.createOrUpdatePutRequest(any(), any(), anyInt(), anyString(), any())).thenAnswer(invocation -> { LOGGER.info((String) invocation.getArgument(1) + invocation.getArgument(2) + invocation.getArgument(3) + invocation.getArgument(4)); @@ -466,7 +464,7 @@ public void waitForConnectorState(String connectorName, String state) { Map connector = s.getStatus().getConnectorStatus(); if (connector != null) { @SuppressWarnings({ "rawtypes" }) - Object connectorState = ((Map) connector.getOrDefault("connector", emptyMap())).get("state"); + Object connectorState = ((Map) connector.getOrDefault("connector", 
Map.of())).get("state"); return connectorState instanceof String && state.equals(connectorState); } else { @@ -1380,7 +1378,7 @@ public void testConnectorBothStateAndPause() { " level: INFO\n" + " topics: timer-topic"; - KafkaConnector kcr = TestUtils.fromYamlString(yaml, KafkaConnector.class); + KafkaConnector kcr = ReadWriteUtils.readObjectFromYamlString(yaml, KafkaConnector.class); Crds.kafkaConnectorOperation(client).inNamespace(namespace).resource(kcr).create(); waitForConnectorReady(connectorName); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorTest.java index c52efd5797a..024435ccb0b 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorTest.java @@ -116,7 +116,7 @@ import java.util.stream.Collectors; import static io.strimzi.operator.common.model.Ca.x509Certificate; -import static io.strimzi.test.TestUtils.set; +import static io.strimzi.test.TestUtils.modifiableSet; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static java.util.Collections.singletonMap; @@ -527,7 +527,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List expectedSecrets = set( + Set expectedSecrets = modifiableSet( KafkaResources.clientsCaKeySecretName(CLUSTER_NAME), KafkaResources.clientsCaCertificateSecretName(CLUSTER_NAME), KafkaResources.clusterCaCertificateSecretName(CLUSTER_NAME), @@ -687,7 +687,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List expectedRoutes = set(KafkaResources.bootstrapServiceName(CLUSTER_NAME)); + Set expectedRoutes = modifiableSet(KafkaResources.bootstrapServiceName(CLUSTER_NAME)); for (NodeRef node : kafkaCluster.nodes()) { if 
(node.broker()) { expectedRoutes.add(node.podName()); @@ -883,13 +883,13 @@ private void updateCluster(VertxTestContext context, Kafka originalKafka, Kafka when(mockCmOps.listAsync(NAMESPACE, updatedKafkaCluster.getSelectorLabels())).thenReturn(Future.succeededFuture(List.of())); when(mockCmOps.deleteAsync(any(), any(), any(), anyBoolean())).thenReturn(Future.succeededFuture()); - Set metricsCms = set(); + Set metricsCms = modifiableSet(); doAnswer(invocation -> { metricsCms.add(invocation.getArgument(1)); return Future.succeededFuture(); }).when(mockCmOps).reconcile(any(), eq(NAMESPACE), any(), any()); - Set logCms = set(); + Set logCms = modifiableSet(); doAnswer(invocation -> { logCms.add(invocation.getArgument(1)); return Future.succeededFuture(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorZooBasedTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorZooBasedTest.java index 6244c97a163..3a65883a3b8 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorZooBasedTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaAssemblyOperatorZooBasedTest.java @@ -90,7 +90,7 @@ import io.strimzi.operator.common.operator.MockCertManager; import io.strimzi.operator.common.operator.resource.ReconcileResult; import io.strimzi.platform.KubernetesVersion; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.core.WorkerExecutor; @@ -123,7 +123,7 @@ import java.util.stream.Collectors; import static io.strimzi.operator.common.model.Ca.x509Certificate; -import static io.strimzi.test.TestUtils.set; +import static io.strimzi.test.TestUtils.modifiableSet; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static 
java.util.Collections.emptyMap; @@ -576,7 +576,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List s ArgumentCaptor pvcCaptor = ArgumentCaptor.forClass(PersistentVolumeClaim.class); when(mockPvcOps.reconcile(any(), anyString(), anyString(), pvcCaptor.capture())).thenReturn(Future.succeededFuture()); - Set expectedSecrets = set( + Set expectedSecrets = modifiableSet( KafkaResources.clientsCaKeySecretName(kafkaName), KafkaResources.clientsCaCertificateSecretName(kafkaName), KafkaResources.clusterCaCertificateSecretName(kafkaName), @@ -672,7 +672,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List s .onComplete(context.succeeding(status -> context.verify(() -> { // We expect a headless and headful service - Set expectedServices = set( + Set expectedServices = modifiableSet( KafkaResources.zookeeperHeadlessServiceName(kafkaName), KafkaResources.zookeeperServiceName(kafkaName), KafkaResources.bootstrapServiceName(kafkaName), @@ -701,7 +701,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List s List capturedSps = spsCaptor.getAllValues(); // We expect a StrimziPodSet for kafka and zookeeper... 
assertThat(capturedSps.stream().map(sps -> sps.getMetadata().getName()).collect(Collectors.toSet()), - is(set(KafkaResources.kafkaComponentName(kafkaName), KafkaResources.zookeeperComponentName(kafkaName)))); + is(modifiableSet(KafkaResources.kafkaComponentName(kafkaName), KafkaResources.zookeeperComponentName(kafkaName)))); // expected Secrets with certificates assertThat(new TreeSet<>(secretsMap.keySet()), is(new TreeSet<>(expectedSecrets))); @@ -733,7 +733,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List s // Check PDBs assertThat(pdbCaptor.getAllValues(), hasSize(2)); assertThat(pdbCaptor.getAllValues().stream().map(sts -> sts.getMetadata().getName()).collect(Collectors.toSet()), - is(set(KafkaResources.kafkaComponentName(kafkaName), KafkaResources.zookeeperComponentName(kafkaName)))); + is(modifiableSet(KafkaResources.kafkaComponentName(kafkaName), KafkaResources.zookeeperComponentName(kafkaName)))); // Check PVCs assertThat(pvcCaptor.getAllValues(), hasSize(expectedPvcs.size())); @@ -745,7 +745,7 @@ private void createCluster(VertxTestContext context, Kafka kafka, List s // Verify deleted routes if (openShift) { - Set expectedRoutes = set(KafkaResources.bootstrapServiceName(kafkaName)); + Set expectedRoutes = modifiableSet(KafkaResources.bootstrapServiceName(kafkaName)); for (NodeRef node : kafkaCluster.nodes()) { expectedRoutes.add(node.podName()); @@ -990,7 +990,7 @@ private void updateCluster(VertxTestContext context, Kafka originalAssembly, Kaf .withName(KafkaResources.zookeeperMetricsAndLogConfigMapName(clusterName)) .withNamespace(clusterNamespace) .endMetadata() - .withData(singletonMap(MetricsModel.CONFIG_MAP_KEY, TestUtils.toYamlString(METRICS_CONFIG))) + .withData(singletonMap(MetricsModel.CONFIG_MAP_KEY, ReadWriteUtils.writeObjectToYamlString(METRICS_CONFIG))) .build(); when(mockCmOps.get(clusterNamespace, KafkaResources.zookeeperMetricsAndLogConfigMapName(clusterName))).thenReturn(zkMetricsCm); @@ -1224,13 +1224,13 @@ 
private void updateCluster(VertxTestContext context, Kafka originalAssembly, Kaf } // Mock CM patch - Set metricsCms = set(); + Set metricsCms = modifiableSet(); doAnswer(invocation -> { metricsCms.add(invocation.getArgument(1)); return Future.succeededFuture(); }).when(mockCmOps).reconcile(any(), eq(clusterNamespace), any(), any()); - Set logCms = set(); + Set logCms = modifiableSet(); doAnswer(invocation -> { logCms.add(invocation.getArgument(1)); return Future.succeededFuture(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaBridgeAssemblyOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaBridgeAssemblyOperatorTest.java index dc94b410138..2e0b74d6790 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaBridgeAssemblyOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaBridgeAssemblyOperatorTest.java @@ -322,7 +322,7 @@ public void testCreateOrUpdateUpdatesCluster(VertxTestContext context) { when(mockCmOps.get(kbNamespace, KafkaBridgeResources.metricsAndLogConfigMapName(kbName))).thenReturn(metricsCm); // Mock CM patch - Set metricsCms = TestUtils.set(); + Set metricsCms = TestUtils.modifiableSet(); doAnswer(invocation -> { metricsCms.add(invocation.getArgument(1)); return Future.succeededFuture(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectApiIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectApiIT.java index ebf897b7c77..2268cb3dc9f 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectApiIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectApiIT.java @@ -8,7 +8,6 @@ import io.strimzi.operator.common.BackOff; import io.strimzi.operator.common.Reconciliation; import 
io.strimzi.operator.common.model.OrderedProperties; -import io.strimzi.test.TestUtils; import io.strimzi.test.container.StrimziKafkaCluster; import io.vertx.core.Future; import io.vertx.core.Handler; @@ -159,7 +158,7 @@ public void handle(Long timerId) { }))) .compose(status -> client.getConnectorConfig(Reconciliation.DUMMY_RECONCILIATION, new BackOff(10), "localhost", port, "test")) .onComplete(context.succeeding(config -> context.verify(() -> { - assertThat(config, is(TestUtils.map("connector.class", "FileStreamSource", + assertThat(config, is(Map.of("connector.class", "FileStreamSource", "file", "/dev/null", "tasks.max", "1", "name", "test", diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectAssemblyOperatorMockTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectAssemblyOperatorMockTest.java index f7554faa87c..54a5ad6b1f4 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectAssemblyOperatorMockTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaConnectAssemblyOperatorMockTest.java @@ -20,7 +20,6 @@ import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.common.model.Labels; import io.strimzi.platform.KubernetesVersion; -import io.strimzi.test.TestUtils; import io.strimzi.test.mockkube3.MockKube3; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -41,6 +40,7 @@ import java.util.List; import java.util.Locale; +import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.singletonMap; @@ -144,7 +144,7 @@ public void testReconcileCreateAndUpdate(VertxTestContext context) { .withNewMetadata() .withName(CLUSTER_NAME) .withNamespace(namespace) - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .endMetadata() .withNewSpec() .withReplicas(REPLICAS) @@ -172,7 +172,7 @@ public 
void testPauseReconcileUnpause(VertxTestContext context) { .withNewMetadata() .withName(CLUSTER_NAME) .withNamespace(namespace) - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .withAnnotations(singletonMap("strimzi.io/pause-reconciliation", "true")) .endMetadata() .withNewSpec() @@ -213,7 +213,7 @@ public void testPauseReconcileUnpause(VertxTestContext context) { .withNewMetadata() .withName(CLUSTER_NAME) .withNamespace(namespace) - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .withAnnotations(singletonMap("strimzi.io/pause-reconciliation", "false")) .endMetadata() .withNewSpec() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorConnectorAutoRestartTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorConnectorAutoRestartTest.java index fbd366f48f6..58621eac35f 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorConnectorAutoRestartTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorConnectorAutoRestartTest.java @@ -20,7 +20,6 @@ import io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier; import io.strimzi.operator.common.Reconciliation; import io.strimzi.platform.KubernetesVersion; -import io.strimzi.test.TestUtils; import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.junit5.Checkpoint; @@ -61,7 +60,7 @@ public class KafkaMirrorMaker2AssemblyOperatorConnectorAutoRestartTest { .withNewMetadata() .withName("my-mm2") .withNamespace("namespace") - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .endMetadata() .withNewSpec() .withReplicas(1) diff --git 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorMockTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorMockTest.java index 9c771bb3354..c97769bf2d7 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorMockTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMaker2AssemblyOperatorMockTest.java @@ -28,7 +28,6 @@ import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.model.OrderedProperties; import io.strimzi.platform.KubernetesVersion; -import io.strimzi.test.TestUtils; import io.strimzi.test.mockkube3.MockKube3; import io.vertx.core.Future; import io.vertx.core.Promise; @@ -49,6 +48,7 @@ import java.util.List; import java.util.Locale; +import java.util.Map; import static java.util.Collections.emptyList; import static java.util.Collections.singletonMap; @@ -166,7 +166,7 @@ public void testReconcileUpdate(VertxTestContext context) { .withNewMetadata() .withName(CLUSTER_NAME) .withNamespace(namespace) - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .endMetadata() .withNewSpec() .withReplicas(REPLICAS) @@ -197,7 +197,7 @@ public void testPauseReconcile(VertxTestContext context) { .withNewMetadata() .withName(CLUSTER_NAME) .withNamespace(namespace) - .withLabels(TestUtils.map("foo", "bar")) + .withLabels(Map.of("foo", "bar")) .withAnnotations(singletonMap("strimzi.io/pause-reconciliation", "true")) .endMetadata() .withNewSpec() diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMakerAssemblyOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMakerAssemblyOperatorTest.java index 7950461fc1a..eafa800be4a 100644 --- 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMakerAssemblyOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaMirrorMakerAssemblyOperatorTest.java @@ -314,7 +314,7 @@ public void testUpdateCluster(VertxTestContext context) { when(mockCmOps.get(kmmNamespace, KafkaMirrorMakerResources.metricsAndLogConfigMapName(kmmName))).thenReturn(metricsCm); // Mock CM patch - Set metricsCms = TestUtils.set(); + Set metricsCms = TestUtils.modifiableSet(); doAnswer(invocation -> { metricsCms.add(invocation.getArgument(1)); return Future.succeededFuture(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceAssemblyOperatorTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceAssemblyOperatorTest.java index 1e8718c6c94..001830bba8e 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceAssemblyOperatorTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceAssemblyOperatorTest.java @@ -42,6 +42,7 @@ import io.strimzi.operator.common.model.cruisecontrol.CruiseControlEndpoints; import io.strimzi.operator.common.operator.MockCertManager; import io.strimzi.platform.KubernetesVersion; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.mockkube3.MockKube3; import io.vertx.core.Vertx; @@ -139,8 +140,8 @@ public static void beforeAll() throws IOException { // Configure Cruise Control mock cruiseControlPort = TestUtils.getFreePort(); - tlsKeyFile = TestUtils.tempFile(KafkaRebalanceAssemblyOperatorTest.class.getSimpleName(), ".key"); - tlsCrtFile = TestUtils.tempFile(KafkaRebalanceAssemblyOperatorTest.class.getSimpleName(), ".crt"); + tlsKeyFile = ReadWriteUtils.tempFile(KafkaRebalanceAssemblyOperatorTest.class.getSimpleName(), ".key"); + tlsCrtFile = 
ReadWriteUtils.tempFile(KafkaRebalanceAssemblyOperatorTest.class.getSimpleName(), ".crt"); new MockCertManager().generateSelfSignedCert(tlsKeyFile, tlsCrtFile, new Subject.Builder().withCommonName("Trusted Test CA").build(), 365); @@ -1412,7 +1413,7 @@ public void testRebalanceUsesUnknownProperty(VertxTestContext context) throws IO "spec:\n" + " unknown: \"value\""; - KafkaRebalance kr = TestUtils.fromYamlString(rebalanceString, KafkaRebalance.class); + KafkaRebalance kr = ReadWriteUtils.readObjectFromYamlString(rebalanceString, KafkaRebalance.class); Crds.kafkaRebalanceOperation(client).inNamespace(namespace).resource(kr).create(); crdCreateKafka(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceStateMachineTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceStateMachineTest.java index ebc265fa4b9..7612c099a6c 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceStateMachineTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/KafkaRebalanceStateMachineTest.java @@ -30,6 +30,7 @@ import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.model.cruisecontrol.CruiseControlEndpoints; import io.strimzi.operator.common.operator.MockCertManager; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.vertx.core.AsyncResult; import io.vertx.core.Future; @@ -82,8 +83,8 @@ public class KafkaRebalanceStateMachineTest { @BeforeAll public static void before() throws IOException { - File tlsKeyFile = TestUtils.tempFile(KafkaRebalanceStateMachineTest.class.getSimpleName(), ".key"); - File tlsCrtFile = TestUtils.tempFile(KafkaRebalanceStateMachineTest.class.getSimpleName(), ".crt"); + File tlsKeyFile = ReadWriteUtils.tempFile(KafkaRebalanceStateMachineTest.class.getSimpleName(), ".key"); + File tlsCrtFile = 
ReadWriteUtils.tempFile(KafkaRebalanceStateMachineTest.class.getSimpleName(), ".crt"); new MockCertManager().generateSelfSignedCert(tlsKeyFile, tlsCrtFile, new Subject.Builder().withCommonName("Trusted Test CA").build(), 365); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/StrimziPodSetControllerIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/StrimziPodSetControllerIT.java index 90ba016182d..49184f14855 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/StrimziPodSetControllerIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/assembly/StrimziPodSetControllerIT.java @@ -37,6 +37,7 @@ import io.strimzi.operator.cluster.operator.resource.kubernetes.StrimziPodSetOperator; import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.common.model.Labels; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import io.vertx.core.Vertx; import io.vertx.junit5.VertxExtension; @@ -87,10 +88,10 @@ public static void beforeAll() { LOGGER.info("Created namespace"); LOGGER.info("Creating CRDs"); - TestUtils.createCrd(client, Kafka.CRD_NAME, TestUtils.CRD_KAFKA); - TestUtils.createCrd(client, KafkaConnect.CRD_NAME, TestUtils.CRD_KAFKA_CONNECT); - TestUtils.createCrd(client, KafkaMirrorMaker2.CRD_NAME, TestUtils.CRD_KAFKA_MIRROR_MAKER_2); - TestUtils.createCrd(client, StrimziPodSet.CRD_NAME, TestUtils.CRD_STRIMZI_POD_SET); + CrdUtils.createCrd(client, Kafka.CRD_NAME, CrdUtils.CRD_KAFKA); + CrdUtils.createCrd(client, KafkaConnect.CRD_NAME, CrdUtils.CRD_KAFKA_CONNECT); + CrdUtils.createCrd(client, KafkaMirrorMaker2.CRD_NAME, CrdUtils.CRD_KAFKA_MIRROR_MAKER_2); + CrdUtils.createCrd(client, StrimziPodSet.CRD_NAME, CrdUtils.CRD_STRIMZI_POD_SET); LOGGER.info("Created CRDs"); vertx = Vertx.vertx(); @@ -113,10 +114,10 @@ public static void afterAll() { 
kafkaOp().inNamespace(NAMESPACE).withName(KAFKA_NAME).delete(); kafkaOp().inNamespace(NAMESPACE).withName(OTHER_KAFKA_NAME).delete(); - TestUtils.deleteCrd(client, Kafka.CRD_NAME); - TestUtils.deleteCrd(client, KafkaConnect.CRD_NAME); - TestUtils.deleteCrd(client, KafkaMirrorMaker2.CRD_NAME); - TestUtils.deleteCrd(client, StrimziPodSet.CRD_NAME); + CrdUtils.deleteCrd(client, Kafka.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaConnect.CRD_NAME); + CrdUtils.deleteCrd(client, KafkaMirrorMaker2.CRD_NAME); + CrdUtils.deleteCrd(client, StrimziPodSet.CRD_NAME); TestUtils.deleteNamespace(client, NAMESPACE); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/KafkaBrokerConfigurationDiffTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/KafkaBrokerConfigurationDiffTest.java index 1ec0c5ef05e..7d10df3d2e0 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/KafkaBrokerConfigurationDiffTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/KafkaBrokerConfigurationDiffTest.java @@ -9,7 +9,7 @@ import io.strimzi.operator.cluster.model.KafkaVersion; import io.strimzi.operator.cluster.model.NodeRef; import io.strimzi.operator.common.Reconciliation; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.apache.kafka.clients.admin.AlterConfigOp; import org.apache.kafka.clients.admin.Config; import org.apache.kafka.clients.admin.ConfigEntry; @@ -53,7 +53,7 @@ private ConfigEntry instantiateConfigEntry(String name, String val) { private String getDesiredConfiguration(List additional) { try (InputStream is = getClass().getClassLoader().getResourceAsStream("desired-kafka-broker.conf")) { - String desiredConfigString = TestUtils.readResource(is); + String desiredConfigString = ReadWriteUtils.readInputStream(is); for (ConfigEntry ce : additional) { desiredConfigString += "\n" + ce.name() + "=" + ce.value(); @@ 
-71,7 +71,7 @@ private Config getCurrentConfiguration(List additional) { try (InputStream is = getClass().getClassLoader().getResourceAsStream("current-kafka-broker.conf")) { - List configList = Arrays.asList(TestUtils.readResource(is).split(System.getProperty("line.separator"))); + List configList = Arrays.asList(ReadWriteUtils.readInputStream(is).split(System.getProperty("line.separator"))); configList.forEach(entry -> { String[] split = entry.split("="); String val = split.length == 1 ? "" : split[1]; diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/CruiseControlClientTest.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/CruiseControlClientTest.java index 73e57278011..9c58baa4e14 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/CruiseControlClientTest.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/CruiseControlClientTest.java @@ -10,6 +10,7 @@ import io.strimzi.operator.common.model.cruisecontrol.CruiseControlRebalanceKeys; import io.strimzi.operator.common.model.cruisecontrol.CruiseControlUserTaskStatus; import io.strimzi.operator.common.operator.MockCertManager; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.vertx.core.Future; import io.vertx.core.Vertx; @@ -51,8 +52,8 @@ public class CruiseControlClientTest { @BeforeAll public static void setupServer() throws IOException { cruiseControlPort = TestUtils.getFreePort(); - File tlsKeyFile = TestUtils.tempFile(CruiseControlClientTest.class.getSimpleName(), ".key"); - File tlsCrtFile = TestUtils.tempFile(CruiseControlClientTest.class.getSimpleName(), ".crt"); + File tlsKeyFile = ReadWriteUtils.tempFile(CruiseControlClientTest.class.getSimpleName(), ".key"); + File tlsCrtFile = ReadWriteUtils.tempFile(CruiseControlClientTest.class.getSimpleName(), ".crt"); new 
MockCertManager().generateSelfSignedCert(tlsKeyFile, tlsCrtFile, new Subject.Builder().withCommonName("Trusted Test CA").build(), 365); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/MockCruiseControl.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/MockCruiseControl.java index 7cf65d8dc83..311aee39566 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/MockCruiseControl.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/cruisecontrol/MockCruiseControl.java @@ -14,7 +14,7 @@ import io.strimzi.operator.common.model.cruisecontrol.CruiseControlEndpoints; import io.strimzi.operator.common.model.cruisecontrol.CruiseControlParameters; import io.strimzi.operator.common.operator.MockCertManager; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.mockserver.configuration.ConfigurationProperties; import org.mockserver.integration.ClientAndServer; import org.mockserver.matchers.Times; @@ -127,7 +127,7 @@ public boolean isRunning() { */ public void setupCCStateResponse() { // Non-verbose response - JsonBody jsonProposalNotReady = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-State-proposal-not-ready.json")); + JsonBody jsonProposalNotReady = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-State-proposal-not-ready.json")); server .when( @@ -146,7 +146,7 @@ public void setupCCStateResponse() { // Non-verbose response - JsonBody json = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-State.json")); + JsonBody json = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-State.json")); server .when( @@ -164,7 +164,7 @@ public void setupCCStateResponse() { .withDelay(TimeUnit.SECONDS, 0)); // Verbose response - JsonBody jsonVerbose = new 
JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-State-verbose.json")); + JsonBody jsonVerbose = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-State-verbose.json")); server .when( @@ -188,7 +188,7 @@ public void setupCCStateResponse() { */ public void setupCCRebalanceNotEnoughDataError(CruiseControlEndpoints endpoint) { // Rebalance response with no goal that returns an error - JsonBody jsonError = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-NotEnoughValidWindows-error.json")); + JsonBody jsonError = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-NotEnoughValidWindows-error.json")); server .when( @@ -214,7 +214,7 @@ public void setupCCRebalanceNotEnoughDataError(CruiseControlEndpoints endpoint) */ public void setupCCBrokerDoesNotExist(CruiseControlEndpoints endpoint) { // Add/remove broker response with no goal that returns an error - JsonBody jsonError = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Broker-not-exist.json")); + JsonBody jsonError = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Broker-not-exist.json")); server .when( @@ -248,7 +248,7 @@ public void setupCCRebalanceResponse(int pendingCalls, CruiseControlEndpoints en */ public void setupCCRebalanceResponse(int pendingCalls, int responseDelay, CruiseControlEndpoints endpoint) { // Rebalance in progress response with no goals set - non-verbose - JsonBody pendingJson = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-no-goals-in-progress.json")); + JsonBody pendingJson = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-no-goals-in-progress.json")); server .when( request() @@ -269,7 +269,7 @@ public void setupCCRebalanceResponse(int pendingCalls, int responseDelay, Cruise .withDelay(TimeUnit.SECONDS, responseDelay)); // Rebalance response with no goals set - 
non-verbose - JsonBody json = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-no-goals.json")); + JsonBody json = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-no-goals.json")); server .when( @@ -290,7 +290,7 @@ public void setupCCRebalanceResponse(int pendingCalls, int responseDelay, Cruise .withDelay(TimeUnit.SECONDS, responseDelay)); // Rebalance response with no goals set - verbose - JsonBody jsonVerbose = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-no-goals-verbose.json")); + JsonBody jsonVerbose = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-no-goals-verbose.json")); server .when( @@ -316,7 +316,7 @@ public void setupCCRebalanceResponse(int pendingCalls, int responseDelay, Cruise */ public void setupCCRebalanceBadGoalsError(CruiseControlEndpoints endpoint) { // Response if the user has set custom goals which do not include all configured hard.goals - JsonBody jsonError = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-bad-goals-error.json")); + JsonBody jsonError = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-bad-goals-error.json")); server .when( @@ -339,7 +339,7 @@ public void setupCCRebalanceBadGoalsError(CruiseControlEndpoints endpoint) { // Response if the user has set custom goals which do not include all configured hard.goals // Note: This uses the no-goals example response but the difference between custom goals and default goals is not tested here - JsonBody jsonSummary = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Rebalance-no-goals-verbose.json")); + JsonBody jsonSummary = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Rebalance-no-goals-verbose.json")); server .when( @@ -372,9 +372,9 @@ public void setupCCRebalanceBadGoalsError(CruiseControlEndpoints endpoint) { */ public 
void setupCCUserTasksResponseNoGoals(int activeCalls, int inExecutionCalls) throws IOException, URISyntaxException { // User tasks response for the rebalance request with no goals set (non-verbose) - JsonBody jsonActive = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-Active.json")); - JsonBody jsonInExecution = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-inExecution.json")); - JsonBody jsonCompleted = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-completed.json")); + JsonBody jsonActive = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-Active.json")); + JsonBody jsonInExecution = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-inExecution.json")); + JsonBody jsonCompleted = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-completed.json")); // The first activeCalls times respond that with a status of "Active" server @@ -431,9 +431,9 @@ public void setupCCUserTasksResponseNoGoals(int activeCalls, int inExecutionCall .withDelay(TimeUnit.SECONDS, 0)); // User tasks response for the rebalance request with no goals set (verbose) - JsonBody jsonActiveVerbose = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-Active.json")); - JsonBody jsonInExecutionVerbose = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-inExecution.json")); - JsonBody jsonCompletedVerbose = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-completed.json")); + JsonBody jsonActiveVerbose = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-Active.json")); + 
JsonBody jsonInExecutionVerbose = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-inExecution.json")); + JsonBody jsonCompletedVerbose = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-rebalance-no-goals-verbose-completed.json")); // The first activeCalls times respond that with a status of "Active" server @@ -496,7 +496,7 @@ public void setupCCUserTasksResponseNoGoals(int activeCalls, int inExecutionCall */ public void setupCCUserTasksCompletedWithError() throws IOException, URISyntaxException { // This simulates asking for the status of a task that has Complete with error and fetch_completed_task=true - JsonBody compWithErrorJson = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-status-completed-with-error.json")); + JsonBody compWithErrorJson = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-status-completed-with-error.json")); server .when( @@ -520,7 +520,7 @@ public void setupCCUserTasksCompletedWithError() throws IOException, URISyntaxEx */ public void setupUserTasktoEmpty() { // This simulates asking for the status with empty user task - JsonBody jsonEmptyUserTask = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-User-task-status-empty.json")); + JsonBody jsonEmptyUserTask = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-User-task-status-empty.json")); server .when( @@ -543,7 +543,7 @@ public void setupUserTasktoEmpty() { * Setup response of task being stopped. 
*/ public void setupCCStopResponse() { - JsonBody jsonStop = new JsonBody(TestUtils.jsonFromResource(CC_JSON_ROOT + "CC-Stop.json")); + JsonBody jsonStop = new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile(CC_JSON_ROOT + "CC-Stop.json")); server .when( diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/AbstractCustomResourceOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/AbstractCustomResourceOperatorIT.java index de4f4ee1b8c..b719fb52e39 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/AbstractCustomResourceOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/AbstractCustomResourceOperatorIT.java @@ -12,6 +12,7 @@ import io.strimzi.api.kafka.model.common.Condition; import io.strimzi.api.kafka.model.common.ConditionBuilder; import io.strimzi.operator.common.Reconciliation; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import io.vertx.core.Promise; import io.vertx.core.Vertx; @@ -80,14 +81,14 @@ public void before() { LOGGER.info("Created namespace"); LOGGER.info("Creating CRD"); - TestUtils.createCrd(client, getCrdName(), getCrd()); + CrdUtils.createCrd(client, getCrdName(), getCrd()); LOGGER.info("Created CRD"); } @AfterAll public void after() { LOGGER.info("Deleting CRD"); - TestUtils.deleteCrd(client, getCrdName()); + CrdUtils.deleteCrd(client, getCrdName()); TestUtils.deleteNamespace(client, getNamespace()); client.close(); diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaBridgeCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaBridgeCrdOperatorIT.java index 0892e5e485c..bc17505e572 100644 --- 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaBridgeCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaBridgeCrdOperatorIT.java @@ -10,7 +10,7 @@ import io.strimzi.api.kafka.model.bridge.KafkaBridgeList; import io.strimzi.api.kafka.model.common.ConditionBuilder; import io.strimzi.api.kafka.model.common.InlineLogging; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; import org.apache.logging.log4j.LogManager; @@ -37,7 +37,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_BRIDGE; + return CrdUtils.CRD_KAFKA_BRIDGE; } @Override diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectCrdOperatorIT.java index 23451817047..fb418d5273f 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectCrdOperatorIT.java @@ -8,7 +8,7 @@ import io.strimzi.api.kafka.model.connect.KafkaConnect; import io.strimzi.api.kafka.model.connect.KafkaConnectBuilder; import io.strimzi.api.kafka.model.connect.KafkaConnectList; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; import org.apache.logging.log4j.LogManager; @@ -35,7 +35,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_CONNECT; + return CrdUtils.CRD_KAFKA_CONNECT; } @Override diff --git 
a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectorCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectorCrdOperatorIT.java index 3b0070d2754..eb4d3a4107f 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectorCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaConnectorCrdOperatorIT.java @@ -8,7 +8,7 @@ import io.strimzi.api.kafka.model.connector.KafkaConnector; import io.strimzi.api.kafka.model.connector.KafkaConnectorBuilder; import io.strimzi.api.kafka.model.connector.KafkaConnectorList; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; import org.apache.logging.log4j.LogManager; @@ -35,7 +35,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_CONNECTOR; + return CrdUtils.CRD_KAFKA_CONNECTOR; } @Override diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaCrdOperatorIT.java index 34796d57dca..fd6fee238b0 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaCrdOperatorIT.java @@ -10,7 +10,7 @@ import io.strimzi.api.kafka.model.kafka.KafkaList; import io.strimzi.api.kafka.model.kafka.listener.GenericKafkaListenerBuilder; import io.strimzi.api.kafka.model.kafka.listener.KafkaListenerType; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; 
import org.apache.logging.log4j.LogManager; @@ -37,7 +37,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA; + return CrdUtils.CRD_KAFKA; } @Override diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMaker2CrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMaker2CrdOperatorIT.java index 42b438505b3..2bda2394bec 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMaker2CrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMaker2CrdOperatorIT.java @@ -8,7 +8,7 @@ import io.strimzi.api.kafka.model.mirrormaker2.KafkaMirrorMaker2; import io.strimzi.api.kafka.model.mirrormaker2.KafkaMirrorMaker2Builder; import io.strimzi.api.kafka.model.mirrormaker2.KafkaMirrorMaker2List; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; import org.apache.logging.log4j.LogManager; @@ -35,7 +35,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_MIRROR_MAKER_2; + return CrdUtils.CRD_KAFKA_MIRROR_MAKER_2; } @Override diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMakerCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMakerCrdOperatorIT.java index d46fbc85b73..890b7a36900 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMakerCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/KafkaMirrorMakerCrdOperatorIT.java @@ -9,7 +9,7 @@ import 
io.strimzi.api.kafka.model.mirrormaker.KafkaMirrorMaker; import io.strimzi.api.kafka.model.mirrormaker.KafkaMirrorMakerBuilder; import io.strimzi.api.kafka.model.mirrormaker.KafkaMirrorMakerList; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.junit5.VertxExtension; import io.vertx.junit5.VertxTestContext; import org.apache.logging.log4j.LogManager; @@ -36,7 +36,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_MIRROR_MAKER; + return CrdUtils.CRD_KAFKA_MIRROR_MAKER; } @Override diff --git a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/StrimziPodSetCrdOperatorIT.java b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/StrimziPodSetCrdOperatorIT.java index 12101d0789d..61062927fec 100644 --- a/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/StrimziPodSetCrdOperatorIT.java +++ b/cluster-operator/src/test/java/io/strimzi/operator/cluster/operator/resource/kubernetes/StrimziPodSetCrdOperatorIT.java @@ -16,7 +16,7 @@ import io.strimzi.api.kafka.model.podset.StrimziPodSetBuilder; import io.strimzi.api.kafka.model.podset.StrimziPodSetList; import io.strimzi.operator.common.Reconciliation; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.vertx.core.Promise; import io.vertx.junit5.Checkpoint; import io.vertx.junit5.Timeout; @@ -55,7 +55,7 @@ protected StrimziPodSetOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_STRIMZI_POD_SET; + return CrdUtils.CRD_STRIMZI_POD_SET; } @Override diff --git a/mockkube/src/main/java/io/strimzi/test/mockkube3/MockKube3.java b/mockkube/src/main/java/io/strimzi/test/mockkube3/MockKube3.java index 69898a20e4d..e72597d005b 100644 --- a/mockkube/src/main/java/io/strimzi/test/mockkube3/MockKube3.java +++ b/mockkube/src/main/java/io/strimzi/test/mockkube3/MockKube3.java @@ 
-19,7 +19,7 @@ import io.strimzi.api.kafka.Crds; import io.strimzi.api.kafka.model.kafka.Kafka; import io.strimzi.api.kafka.model.nodepool.KafkaNodePool; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import io.strimzi.test.mockkube3.controllers.AbstractMockController; import io.strimzi.test.mockkube3.controllers.MockDeletionController; import io.strimzi.test.mockkube3.controllers.MockDeploymentController; @@ -260,7 +260,7 @@ public MockKube3Builder withDeletionController() { * @return MockKube builder instance */ public MockKube3Builder withKafkaCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA); + mock.registerCrd(CrdUtils.CRD_KAFKA); return this; } @@ -270,7 +270,7 @@ public MockKube3Builder withKafkaCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaTopicCrd() { - mock.registerCrd(TestUtils.CRD_TOPIC); + mock.registerCrd(CrdUtils.CRD_TOPIC); return this; } @@ -280,7 +280,7 @@ public MockKube3Builder withKafkaTopicCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaUserCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA_USER); + mock.registerCrd(CrdUtils.CRD_KAFKA_USER); return this; } @@ -290,7 +290,7 @@ public MockKube3Builder withKafkaUserCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaConnectCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA_CONNECT); + mock.registerCrd(CrdUtils.CRD_KAFKA_CONNECT); return this; } @@ -300,7 +300,7 @@ public MockKube3Builder withKafkaConnectCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaConnectorCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA_CONNECTOR); + mock.registerCrd(CrdUtils.CRD_KAFKA_CONNECTOR); return this; } @@ -310,7 +310,7 @@ public MockKube3Builder withKafkaConnectorCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaMirrorMaker2Crd() { - mock.registerCrd(TestUtils.CRD_KAFKA_MIRROR_MAKER_2); + mock.registerCrd(CrdUtils.CRD_KAFKA_MIRROR_MAKER_2); return this; 
} @@ -320,7 +320,7 @@ public MockKube3Builder withKafkaMirrorMaker2Crd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaRebalanceCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA_REBALANCE); + mock.registerCrd(CrdUtils.CRD_KAFKA_REBALANCE); return this; } @@ -330,7 +330,7 @@ public MockKube3Builder withKafkaRebalanceCrd() { * @return MockKube builder instance */ public MockKube3Builder withKafkaNodePoolCrd() { - mock.registerCrd(TestUtils.CRD_KAFKA_NODE_POOL); + mock.registerCrd(CrdUtils.CRD_KAFKA_NODE_POOL); return this; } @@ -340,7 +340,7 @@ public MockKube3Builder withKafkaNodePoolCrd() { * @return MockKube builder instance */ public MockKube3Builder withStrimziPodSetCrd() { - mock.registerCrd(TestUtils.CRD_STRIMZI_POD_SET); + mock.registerCrd(CrdUtils.CRD_STRIMZI_POD_SET); return this; } diff --git a/operator-common/src/test/java/io/strimzi/operator/common/auth/PemAuthIdentityTest.java b/operator-common/src/test/java/io/strimzi/operator/common/auth/PemAuthIdentityTest.java index f69a3fe98f6..acb9fa7f985 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/auth/PemAuthIdentityTest.java +++ b/operator-common/src/test/java/io/strimzi/operator/common/auth/PemAuthIdentityTest.java @@ -10,7 +10,8 @@ import io.strimzi.operator.common.operator.MockCertManager; import org.junit.jupiter.api.Test; -import static io.strimzi.test.TestUtils.map; +import java.util.Map; + import static java.util.Collections.emptyMap; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -40,7 +41,7 @@ public void testSecretWithMissingCertChainThrowsException() { .withName(KafkaResources.clusterOperatorCertsSecretName(CLUSTER)) .withNamespace(NAMESPACE) .endMetadata() - .withData(map("cluster-operator.key", "key")) + .withData(Map.of("cluster-operator.key", "key")) .build(); Exception e = assertThrows(RuntimeException.class, () -> PemAuthIdentity.clusterOperator(secretWithMissingClusterOperatorKey)); 
assertThat(e.getMessage(), is("The Secret testns/testcluster-cluster-operator-certs is missing the field cluster-operator.crt")); @@ -53,7 +54,7 @@ public void testSecretCorrupted() { .withName(KafkaResources.clusterOperatorCertsSecretName(CLUSTER)) .withNamespace(NAMESPACE) .endMetadata() - .withData(map("cluster-operator.key", MockCertManager.clusterCaKey(), + .withData(Map.of("cluster-operator.key", MockCertManager.clusterCaKey(), "cluster-operator.crt", "bm90YWNlcnQ=", //notacert "cluster-operator.p12", "bm90YXRydXN0c3RvcmU=", //notatruststore "cluster-operator.password", "bm90YXBhc3N3b3Jk")) //notapassword diff --git a/operator-common/src/test/java/io/strimzi/operator/common/auth/PemTrustSetTest.java b/operator-common/src/test/java/io/strimzi/operator/common/auth/PemTrustSetTest.java index fc242e0079e..192601d5cbd 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/auth/PemTrustSetTest.java +++ b/operator-common/src/test/java/io/strimzi/operator/common/auth/PemTrustSetTest.java @@ -11,7 +11,6 @@ import java.util.Map; -import static io.strimzi.test.TestUtils.map; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -27,7 +26,7 @@ public void testSecretCorrupted() { .withName(KafkaResources.clusterOperatorCertsSecretName(CLUSTER)) .withNamespace(NAMESPACE) .endMetadata() - .withData(map("ca.crt", "notacert")) + .withData(Map.of("ca.crt", "notacert")) .build(); PemTrustSet pemTrustSet = new PemTrustSet(secretWithBadCertificate); Exception e = assertThrows(RuntimeException.class, pemTrustSet::jksTrustStore); diff --git a/operator-common/src/test/java/io/strimzi/operator/common/model/ResourceVisitorTest.java b/operator-common/src/test/java/io/strimzi/operator/common/model/ResourceVisitorTest.java index fa338d75da1..3e3ad6ead36 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/model/ResourceVisitorTest.java +++ 
b/operator-common/src/test/java/io/strimzi/operator/common/model/ResourceVisitorTest.java @@ -6,7 +6,7 @@ import io.strimzi.api.kafka.model.kafka.Kafka; import io.strimzi.operator.common.Reconciliation; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.junit.jupiter.api.Test; import java.lang.reflect.AnnotatedElement; @@ -21,7 +21,7 @@ public class ResourceVisitorTest { @Test public void testDoesNotThrow() { - Kafka k = TestUtils.fromYaml("/example.yaml", Kafka.class, true); + Kafka k = ReadWriteUtils.readObjectFromYamlFileInResources("/example.yaml", Kafka.class, true); assertThat(k, is(notNullValue())); ResourceVisitor.visit(new Reconciliation("test", "kind", "namespace", "name"), k, new ResourceVisitor.Visitor() { @Override @@ -38,7 +38,7 @@ public void visitObject(Reconciliation reconciliation, List path, Object @Test public void testDoesNotThrowWithListenerList() { - Kafka k = TestUtils.fromYaml("/example2.yaml", Kafka.class, true); + Kafka k = ReadWriteUtils.readObjectFromYamlFileInResources("/example2.yaml", Kafka.class, true); assertThat(k, is(notNullValue())); ResourceVisitor.visit(new Reconciliation("test", "kind", "namespace", "name"), k, new ResourceVisitor.Visitor() { @Override diff --git a/operator-common/src/test/java/io/strimzi/operator/common/model/ValidationVisitorTest.java b/operator-common/src/test/java/io/strimzi/operator/common/model/ValidationVisitorTest.java index 35e9214087c..e6bd22a4a36 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/model/ValidationVisitorTest.java +++ b/operator-common/src/test/java/io/strimzi/operator/common/model/ValidationVisitorTest.java @@ -9,7 +9,7 @@ import io.strimzi.api.kafka.model.kafka.Kafka; import io.strimzi.api.kafka.model.kafka.KafkaBuilder; import io.strimzi.operator.common.Reconciliation; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.logging.TestLogger; import org.apache.logging.log4j.Level; 
import org.junit.jupiter.api.Test; @@ -27,7 +27,7 @@ public class ValidationVisitorTest { @Test public void testValidationErrorsAreLogged() { - Kafka k = TestUtils.fromYaml("/example.yaml", Kafka.class, true); + Kafka k = ReadWriteUtils.readObjectFromYamlFileInResources("/example.yaml", Kafka.class, true); assertThat(k, is(notNullValue())); TestLogger logger = TestLogger.create(ValidationVisitorTest.class); HasMetadata resource = new KafkaBuilder() diff --git a/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/AbstractCustomResourceOperatorIT.java b/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/AbstractCustomResourceOperatorIT.java index 21984208344..ebb3abb8638 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/AbstractCustomResourceOperatorIT.java +++ b/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/AbstractCustomResourceOperatorIT.java @@ -13,6 +13,7 @@ import io.strimzi.api.kafka.model.common.ConditionBuilder; import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.common.Util; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -72,13 +73,13 @@ public void before() { LOGGER.info("Created namespace"); LOGGER.info("Creating CRD"); - TestUtils.createCrd(client, getCrdName(), getCrd()); + CrdUtils.createCrd(client, getCrdName(), getCrd()); LOGGER.info("Created CRD"); } @AfterAll public void after() { - TestUtils.deleteCrd(client, getCrdName()); + CrdUtils.deleteCrd(client, getCrdName()); TestUtils.deleteNamespace(client, getNamespace()); client.close(); diff --git a/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/KafkaUserCrdOperatorIT.java 
b/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/KafkaUserCrdOperatorIT.java index f9fe652f705..04241ab4e1d 100644 --- a/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/KafkaUserCrdOperatorIT.java +++ b/operator-common/src/test/java/io/strimzi/operator/common/operator/resource/concurrent/KafkaUserCrdOperatorIT.java @@ -8,7 +8,7 @@ import io.strimzi.api.kafka.model.user.KafkaUser; import io.strimzi.api.kafka.model.user.KafkaUserBuilder; import io.strimzi.api.kafka.model.user.KafkaUserList; -import io.strimzi.test.TestUtils; +import io.strimzi.test.CrdUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -33,7 +33,7 @@ protected CrdOperator operator() { @Override protected String getCrd() { - return TestUtils.CRD_KAFKA_USER; + return CrdUtils.CRD_KAFKA_USER; } @Override diff --git a/systemtest/src/main/java/io/strimzi/systemtest/logs/LogCollector.java b/systemtest/src/main/java/io/strimzi/systemtest/logs/LogCollector.java index 685a13e3511..22844de50b0 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/logs/LogCollector.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/logs/LogCollector.java @@ -33,7 +33,7 @@ import java.util.Locale; import java.util.Set; -import static io.strimzi.test.TestUtils.writeFile; +import static io.strimzi.test.ReadWriteUtils.writeFile; import static io.strimzi.test.k8s.KubeClusterResource.cmdKubeClient; /** diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/draincleaner/SetupDrainCleaner.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/draincleaner/SetupDrainCleaner.java index 75a1efdb6c1..933b542502f 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/draincleaner/SetupDrainCleaner.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/draincleaner/SetupDrainCleaner.java @@ -23,6 +23,7 @@ import 
io.strimzi.systemtest.security.SystemTestCertAndKey; import io.strimzi.systemtest.security.SystemTestCertManager; import io.strimzi.systemtest.utils.kubeUtils.objects.SecretUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.k8s.KubeClusterResource; import org.apache.logging.log4j.LogManager; @@ -85,7 +86,7 @@ public void applyInstallFiles() { switch (resourceType) { case TestConstants.ROLE: - Role role = TestUtils.configFromYaml(file, Role.class); + Role role = ReadWriteUtils.readObjectFromYamlFilepath(file, Role.class); ResourceManager.getInstance().createResourceWithWait(new RoleBuilder(role) .editMetadata() .withNamespace(TestConstants.DRAIN_CLEANER_NAMESPACE) @@ -93,7 +94,7 @@ public void applyInstallFiles() { .build()); break; case TestConstants.ROLE_BINDING: - RoleBinding roleBinding = TestUtils.configFromYaml(file, RoleBinding.class); + RoleBinding roleBinding = ReadWriteUtils.readObjectFromYamlFilepath(file, RoleBinding.class); ResourceManager.getInstance().createResourceWithWait(new RoleBindingBuilder(roleBinding) .editMetadata() .withNamespace(TestConstants.DRAIN_CLEANER_NAMESPACE) @@ -104,11 +105,11 @@ public void applyInstallFiles() { .build()); break; case TestConstants.CLUSTER_ROLE: - ClusterRole clusterRole = TestUtils.configFromYaml(file, ClusterRole.class); + ClusterRole clusterRole = ReadWriteUtils.readObjectFromYamlFilepath(file, ClusterRole.class); ResourceManager.getInstance().createResourceWithWait(clusterRole); break; case TestConstants.SERVICE_ACCOUNT: - ServiceAccount serviceAccount = TestUtils.configFromYaml(file, ServiceAccount.class); + ServiceAccount serviceAccount = ReadWriteUtils.readObjectFromYamlFilepath(file, ServiceAccount.class); ResourceManager.getInstance().createResourceWithWait(new ServiceAccountBuilder(serviceAccount) .editMetadata() .withNamespace(TestConstants.DRAIN_CLEANER_NAMESPACE) @@ -116,18 +117,18 @@ public void applyInstallFiles() { .build()); break; case 
TestConstants.CLUSTER_ROLE_BINDING: - ClusterRoleBinding clusterRoleBinding = TestUtils.configFromYaml(file, ClusterRoleBinding.class); + ClusterRoleBinding clusterRoleBinding = ReadWriteUtils.readObjectFromYamlFilepath(file, ClusterRoleBinding.class); ResourceManager.getInstance().createResourceWithWait(new ClusterRoleBindingBuilder(clusterRoleBinding).build()); break; case TestConstants.SECRET: ResourceManager.getInstance().createResourceWithWait(customDrainCleanerSecret); break; case TestConstants.SERVICE: - Service service = TestUtils.configFromYaml(file, Service.class); + Service service = ReadWriteUtils.readObjectFromYamlFilepath(file, Service.class); ResourceManager.getInstance().createResourceWithWait(service); break; case TestConstants.VALIDATION_WEBHOOK_CONFIG: - ValidatingWebhookConfiguration webhookConfiguration = TestUtils.configFromYaml(file, ValidatingWebhookConfiguration.class); + ValidatingWebhookConfiguration webhookConfiguration = ReadWriteUtils.readObjectFromYamlFilepath(file, ValidatingWebhookConfiguration.class); // in case that we are running on OpenShift-like cluster, we are not creating the Secret, thus this step is not needed if (customDrainCleanerSecret != null) { diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/jaeger/SetupJaeger.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/jaeger/SetupJaeger.java index 31f222769cc..1a1099dedc6 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/jaeger/SetupJaeger.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/jaeger/SetupJaeger.java @@ -13,11 +13,11 @@ import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.kubernetes.NetworkPolicyResource; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import 
java.io.File; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Map; @@ -174,7 +174,7 @@ private static void deployJaegerOperator() { public static void deployJaegerInstance(String namespaceName) { LOGGER.info("=== Applying jaeger instance install file ==="); - String instanceYamlContent = TestUtils.getContent(new File(JAEGER_INSTANCE_PATH), TestUtils::toYamlString); + String instanceYamlContent = ReadWriteUtils.readFile(JAEGER_INSTANCE_PATH); TestUtils.waitFor("Jaeger Instance deploy", JAEGER_DEPLOYMENT_POLL, JAEGER_DEPLOYMENT_TIMEOUT, () -> { try { diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/ClusterRoleBindingResource.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/ClusterRoleBindingResource.java index 0afe73e75c5..29a4bad0ee9 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/ClusterRoleBindingResource.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/ClusterRoleBindingResource.java @@ -9,7 +9,7 @@ import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.ResourceType; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.k8s.KubeClusterResource; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -69,7 +69,7 @@ public static ClusterRoleBinding clusterRoleBinding(ClusterRoleBinding clusterRo } private static ClusterRoleBinding getClusterRoleBindingFromYaml(String yamlPath) { - return TestUtils.configFromYaml(yamlPath, ClusterRoleBinding.class); + return ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, ClusterRoleBinding.class); } } diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/DeploymentResource.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/DeploymentResource.java index 24e8b0da151..5df5d9e5efd 
100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/DeploymentResource.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/DeploymentResource.java @@ -10,7 +10,7 @@ import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.ResourceType; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import java.util.function.Consumer; @@ -51,6 +51,6 @@ public static void replaceDeployment(String deploymentName, Consumer } public static Deployment getDeploymentFromYaml(String yamlPath) { - return TestUtils.configFromYaml(yamlPath, Deployment.class); + return ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, Deployment.class); } } diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleBindingResource.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleBindingResource.java index 56b53a44c17..72f5e68ec1f 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleBindingResource.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleBindingResource.java @@ -9,7 +9,7 @@ import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.ResourceType; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -60,6 +60,6 @@ public static void roleBinding(String yamlPath, String namespace, String clientN } private static RoleBinding getRoleBindingFromYaml(String yamlPath) { - return TestUtils.configFromYaml(yamlPath, RoleBinding.class); + return ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, RoleBinding.class); } } diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleResource.java 
b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleResource.java index 6761ae85b2a..9f20f12c282 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleResource.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/RoleResource.java @@ -9,7 +9,7 @@ import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.ResourceType; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -56,6 +56,6 @@ public static void role(String yamlPath, String namespace) { } private static Role getRoleFromYaml(String yamlPath) { - return TestUtils.configFromYaml(yamlPath, Role.class); + return ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, Role.class); } } diff --git a/systemtest/src/main/java/io/strimzi/systemtest/resources/operator/SetupClusterOperator.java b/systemtest/src/main/java/io/strimzi/systemtest/resources/operator/SetupClusterOperator.java index f2225e55ac3..07c93a4ef11 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/resources/operator/SetupClusterOperator.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/resources/operator/SetupClusterOperator.java @@ -39,6 +39,7 @@ import io.strimzi.systemtest.templates.kubernetes.ClusterRoleBindingTemplates; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.specific.OlmUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.executor.Exec; import io.strimzi.test.k8s.KubeClusterResource; @@ -579,7 +580,7 @@ public String changeLeaseNameInResourceIfNeeded(String yamlPath) { switch (resourceEntry.getKey()) { case TestConstants.ROLE: - RoleBuilder roleBuilder = new RoleBuilder(TestUtils.configFromYaml(yamlPath, Role.class)) + RoleBuilder roleBuilder = new 
RoleBuilder(ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, Role.class)) .editMetadata() .withName(resourceName) .endMetadata() @@ -587,10 +588,10 @@ public String changeLeaseNameInResourceIfNeeded(String yamlPath) { .withResourceNames(leaseEnvVar.getValue()) .endRule(); - tmpFileContent = TestUtils.toYamlString(roleBuilder.build()); + tmpFileContent = ReadWriteUtils.writeObjectToYamlString(roleBuilder.build()); break; case TestConstants.CLUSTER_ROLE: - ClusterRoleBuilder clusterRoleBuilder = new ClusterRoleBuilder(TestUtils.configFromYaml(yamlPath, ClusterRole.class)) + ClusterRoleBuilder clusterRoleBuilder = new ClusterRoleBuilder(ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, ClusterRole.class)) .editMetadata() .withName(resourceName) .endMetadata() @@ -599,10 +600,10 @@ public String changeLeaseNameInResourceIfNeeded(String yamlPath) { .withResourceNames(leaseEnvVar.getValue()) .endRule(); - tmpFileContent = TestUtils.toYamlString(clusterRoleBuilder.build()); + tmpFileContent = ReadWriteUtils.writeObjectToYamlString(clusterRoleBuilder.build()); break; case TestConstants.ROLE_BINDING: - RoleBindingBuilder roleBindingBuilder = new RoleBindingBuilder(TestUtils.configFromYaml(yamlPath, RoleBinding.class)) + RoleBindingBuilder roleBindingBuilder = new RoleBindingBuilder(ReadWriteUtils.readObjectFromYamlFilepath(yamlPath, RoleBinding.class)) .editMetadata() .withName(resourceName) .endMetadata() @@ -610,13 +611,13 @@ public String changeLeaseNameInResourceIfNeeded(String yamlPath) { .withName(resourceName) .endRoleRef(); - tmpFileContent = TestUtils.toYamlString(roleBindingBuilder.build()); + tmpFileContent = ReadWriteUtils.writeObjectToYamlString(roleBindingBuilder.build()); break; default: return yamlPath; } - TestUtils.writeFile(tmpFile.toPath(), tmpFileContent); + ReadWriteUtils.writeFile(tmpFile.toPath(), tmpFileContent); return tmpFile.getAbsolutePath(); } catch (IOException e) { throw new RuntimeException(e); @@ -649,7 +650,7 @@ public void 
applyClusterOperatorInstallFiles(String namespace) { switch (resourceType) { case TestConstants.ROLE: if (!this.isRolesAndBindingsManagedByAnUser()) { - Role role = TestUtils.configFromYaml(createFile, Role.class); + Role role = ReadWriteUtils.readObjectFromYamlFilepath(createFile, Role.class); ResourceManager.getInstance().createResourceWithWait(new RoleBuilder(role) .editMetadata() .withNamespace(namespace) @@ -659,12 +660,12 @@ public void applyClusterOperatorInstallFiles(String namespace) { break; case TestConstants.CLUSTER_ROLE: if (!this.isRolesAndBindingsManagedByAnUser()) { - ClusterRole clusterRole = TestUtils.configFromYaml(changeLeaseNameInResourceIfNeeded(createFile.getAbsolutePath()), ClusterRole.class); + ClusterRole clusterRole = ReadWriteUtils.readObjectFromYamlFilepath(changeLeaseNameInResourceIfNeeded(createFile.getAbsolutePath()), ClusterRole.class); ResourceManager.getInstance().createResourceWithWait(clusterRole); } break; case TestConstants.SERVICE_ACCOUNT: - ServiceAccount serviceAccount = TestUtils.configFromYaml(createFile, ServiceAccount.class); + ServiceAccount serviceAccount = ReadWriteUtils.readObjectFromYamlFilepath(createFile, ServiceAccount.class); ResourceManager.getInstance().createResourceWithWait(new ServiceAccountBuilder(serviceAccount) .editMetadata() .withNamespace(namespace) @@ -672,7 +673,7 @@ public void applyClusterOperatorInstallFiles(String namespace) { .build()); break; case TestConstants.CONFIG_MAP: - ConfigMap configMap = TestUtils.configFromYaml(createFile, ConfigMap.class); + ConfigMap configMap = ReadWriteUtils.readObjectFromYamlFilepath(createFile, ConfigMap.class); ResourceManager.getInstance().createResourceWithWait(new ConfigMapBuilder(configMap) .editMetadata() .withNamespace(namespace) @@ -692,7 +693,7 @@ public void applyClusterOperatorInstallFiles(String namespace) { .build()); break; case TestConstants.CUSTOM_RESOURCE_DEFINITION_SHORT: - CustomResourceDefinition customResourceDefinition = 
TestUtils.configFromYaml(createFile, CustomResourceDefinition.class); + CustomResourceDefinition customResourceDefinition = ReadWriteUtils.readObjectFromYamlFilepath(createFile, CustomResourceDefinition.class); ResourceManager.getInstance().createResourceWithWait(customResourceDefinition); break; default: @@ -714,7 +715,7 @@ public File switchClusterRolesToRolesIfNeeded(File oldFile, boolean explicitConv fileNameArr[1] = "Role"; final String changeFileName = Arrays.stream(fileNameArr).map(item -> "-" + item).collect(Collectors.joining()).substring(1); File tmpFile = Files.createTempFile(changeFileName.replace(".yaml", ""), ".yaml").toFile(); - TestUtils.writeFile(tmpFile.toPath(), TestUtils.readFile(oldFile).replace("ClusterRole", "Role")); + ReadWriteUtils.writeFile(tmpFile.toPath(), ReadWriteUtils.readFile(oldFile).replace("ClusterRole", "Role")); LOGGER.info("Replaced ClusterRole for Role in {}", oldFile.getAbsolutePath()); return tmpFile; diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java index 7a1b7863213..6185db83917 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java @@ -18,7 +18,7 @@ import io.strimzi.api.kafka.model.kafka.KafkaResources; import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.TestConstants; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.k8s.KubeClusterResource; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -68,7 +68,7 @@ public static KafkaConnectBuilder kafkaConnectWithMetricsAndFileSinkPlugin( } public static ConfigMap connectMetricsConfigMap(String namespaceName, String kafkaConnectClusterName) { - return new 
ConfigMapBuilder(TestUtils.configMapFromYaml(TestConstants.PATH_TO_KAFKA_CONNECT_METRICS_CONFIG, "connect-metrics")) + return new ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_CONNECT_METRICS_CONFIG, ConfigMap.class)) .editOrNewMetadata() .withNamespace(namespaceName) .withName(getConfigMapName(kafkaConnectClusterName)) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java index 79de1f26941..4a313386e99 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java @@ -17,7 +17,7 @@ import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.storage.TestStorage; import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; public class KafkaMirrorMaker2Templates { @@ -66,7 +66,7 @@ public static KafkaMirrorMaker2Builder kafkaMirrorMaker2WithMetrics( } public static ConfigMap mirrorMaker2MetricsConfigMap(String namespaceName, String kafkaMirrorMaker2Name) { - return new ConfigMapBuilder(TestUtils.configMapFromYaml(TestConstants.PATH_TO_KAFKA_MIRROR_MAKER_2_METRICS_CONFIG, "mirror-maker-2-metrics")) + return new ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_MIRROR_MAKER_2_METRICS_CONFIG, ConfigMap.class)) .editOrNewMetadata() .withNamespace(namespaceName) .withName(getConfigMapName(kafkaMirrorMaker2Name)) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java index 2d2ea2574b3..75219c118b7 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java +++ 
b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java @@ -19,7 +19,7 @@ import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.utils.TestKafkaVersion; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import java.util.Collections; @@ -183,9 +183,7 @@ public static KafkaBuilder kafkaWithMetricsAndCruiseControlWithMetrics(String na public static ConfigMap kafkaMetricsConfigMap(String namespaceName, String kafkaClusterName) { String configMapName = kafkaClusterName + METRICS_KAFKA_CONFIG_MAP_SUFFIX; - ConfigMap kafkaMetricsCm = TestUtils.configMapFromYaml(TestConstants.PATH_TO_KAFKA_METRICS_CONFIG, "kafka-metrics"); - - return new ConfigMapBuilder(kafkaMetricsCm) + return new ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_METRICS_CONFIG, ConfigMap.class)) .editMetadata() .withName(configMapName) .withNamespace(namespaceName) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/utils/specific/BridgeUtils.java b/systemtest/src/main/java/io/strimzi/systemtest/utils/specific/BridgeUtils.java index ff7f332878f..f17e547eec0 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/utils/specific/BridgeUtils.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/utils/specific/BridgeUtils.java @@ -4,7 +4,7 @@ */ package io.strimzi.systemtest.utils.specific; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import io.vertx.core.http.HttpMethod; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -76,6 +76,6 @@ public static String getHeaderValue(String expectedHeader, String response) { */ public static String getBridgeVersion() { InputStream bridgeVersionInputStream = BridgeUtils.class.getResourceAsStream("/bridge.version"); - return TestUtils.readResource(bridgeVersionInputStream).replace("\n", ""); + return 
ReadWriteUtils.readInputStream(bridgeVersionInputStream).replace("\n", ""); } } diff --git a/systemtest/src/main/java/io/strimzi/test/executor/Exec.java b/systemtest/src/main/java/io/strimzi/test/executor/Exec.java index 69e0d8de37e..8b1ba3e3334 100644 --- a/systemtest/src/main/java/io/strimzi/test/executor/Exec.java +++ b/systemtest/src/main/java/io/strimzi/test/executor/Exec.java @@ -45,49 +45,64 @@ public class Exec { private static final int MAXIMUM_EXEC_LOG_CHARACTER_SIZE = Integer.parseInt(System.getenv().getOrDefault("STRIMZI_EXEC_MAX_LOG_OUTPUT_CHARACTERS", "20000")); private static final Object LOCK = new Object(); - public Process process; + private Process process; private String stdOut; private String stdErr; private StreamGobbler stdOutReader; private StreamGobbler stdErrReader; private Path logPath; - private boolean appendLineSeparator; + private final boolean appendLineSeparator; + /** + * Constructor + */ public Exec() { this.appendLineSeparator = true; } + /** + * Constructor + * + * @param logPath Path where the log should be stored + */ public Exec(Path logPath) { this.appendLineSeparator = true; this.logPath = logPath; } + /** + * Constructor + * + * @param appendLineSeparator Indicates whether line separator should be appended or not + */ public Exec(boolean appendLineSeparator) { this.appendLineSeparator = appendLineSeparator; } /** - * Getter for stdOutput - * - * @return string stdOut + * @return Standard output of the command */ public String out() { return stdOut; } /** - * Getter for stdErrorOutput - * - * @return string stdErr + * @return Error output of the command */ public String err() { return stdErr; } + /** + * @return Indicates whether the command is running or not + */ public boolean isRunning() { return process.isAlive(); } + /** + * @return The return code of the command or -1 if it is still running + */ public int getRetCode() { LOGGER.info("Process: {}", process); if (isRunning()) { @@ -97,12 +112,12 @@ public int 
getRetCode() { } } - /** * Method executes external command * - * @param command arguments for command - * @return execution results + * @param command The command and its arguments + * + * @return Result of the execution */ public static ExecResult exec(String... command) { return exec(Arrays.asList(command)); @@ -111,8 +126,10 @@ public static ExecResult exec(String... command) { /** * Method executes external command * - * @param command arguments for command - * @return execution results + * @param level Output log level + * @param command The command and its arguments + * + * @return Result of the execution */ public static ExecResult exec(Level level, String... command) { List commands = new ArrayList<>(Arrays.asList(command)); @@ -122,8 +139,9 @@ public static ExecResult exec(Level level, String... command) { /** * Method executes external command * - * @param command arguments for command - * @return execution results + * @param command The list with command and its arguments + * + * @return Result of the execution */ public static ExecResult exec(List command) { return exec(null, command, 0, Level.DEBUG); @@ -132,8 +150,10 @@ public static ExecResult exec(List command) { /** * Method executes external command * - * @param command arguments for command - * @return execution results + * @param input The input that will be passed to the command + * @param command The list with command and its arguments + * + * @return Result of the execution */ public static ExecResult exec(String input, List command) { return exec(input, command, 0, Level.DEBUG); @@ -141,10 +161,13 @@ public static ExecResult exec(String input, List command) { /** * Method executes external command - * @param command arguments for command - * @param timeout timeout for execution - * @param logLevel log output level - * @return execution results + * + * @param input The input that will be passed to the command + * @param command The list with command and its arguments + * @param timeout 
Timeout for the execution after which it will be killed + * @param logLevel Output log level + * + * @return Result of the execution */ public static ExecResult exec(String input, List command, int timeout, Level logLevel) { return exec(input, command, timeout, logLevel, true); @@ -152,11 +175,14 @@ public static ExecResult exec(String input, List command, int timeout, L /** * Method executes external command - * @param command arguments for command - * @param timeout timeout for execution - * @param logLevel log output level - * @param throwErrors look for errors in output and throws exception if true - * @return execution results + * + * @param input The input that will be passed to the command + * @param command The list with command and its arguments + * @param timeout Timeout for the execution after which it will be killed + * @param logLevel Output log level + * @param throwErrors Enabled the check for errors which will throw an exception if some error is found + * + * @return Result of the execution */ public static ExecResult exec(String input, List command, int timeout, Level logLevel, boolean throwErrors) { int ret = 1; @@ -237,12 +263,15 @@ public static ExecResult exec(String input, List command, int timeout, L /** * Method executes external command * - * @param commands arguments for command - * @param timeoutMs timeout in ms for kill - * @return returns ecode of execution - * @throws IOException When writing/closing the OutputStreams or starting the process - * @throws InterruptedException In the waitFor method, if the wait is interrupted - * @throws ExecutionException When getting the output from std + * @param input The input that will be passed to the command + * @param commands The list with command and its arguments + * @param timeoutMs Timeout for the command, after which it will be killed + * + * @return returns Return code of the execution + * + * @throws IOException Is thrown when some IO operation fails + * @throws InterruptedException Is 
thrown when the execution is interrupted + * @throws ExecutionException Is thrown when the execution fails */ public int execute(String input, List commands, long timeoutMs) throws IOException, InterruptedException, ExecutionException { LOGGER.trace("Running command - " + join(" ", commands.toArray(new String[0]))); @@ -291,7 +320,7 @@ public int execute(String input, List commands, long timeoutMs) throws I } /** - * Method kills process + * Stops the command execution */ public void stop() { process.destroyForcibly(); @@ -300,9 +329,7 @@ public void stop() { } /** - * Get standard output of execution - * - * @return future string output + * @return Future with the standard output of the execution */ private Future readStdOutput() { stdOutReader = new StreamGobbler(process.getInputStream()); @@ -310,9 +337,7 @@ private Future readStdOutput() { } /** - * Get standard error output of execution - * - * @return future string error output + * @return Future with the error output of the execution */ private Future readStdError() { stdErrReader = new StreamGobbler(process.getErrorStream()); @@ -335,9 +360,11 @@ private void storeOutputsToFile() { } /** - * Check if command is executable - * @param cmd command - * @return true.false + * Check if a command is executable + * + * @param cmd The command that should be checked + * + * @return Returns true if the command can be executed. False otherwise. */ public static boolean isExecutableOnPath(String cmd) { var osName = System.getProperty("os.name"); @@ -355,7 +382,9 @@ public static boolean isExecutableOnPath(String cmd) { /** * This method check the size of executor output log and cut it if it's too long. 
- * @param log executor log + * + * @param log The log of the executor + * * @return updated log if size is too big */ public static String cutExecutorLog(String log) { diff --git a/systemtest/src/main/java/io/strimzi/test/executor/ExecResult.java b/systemtest/src/main/java/io/strimzi/test/executor/ExecResult.java index be607c447bc..3552578e227 100644 --- a/systemtest/src/main/java/io/strimzi/test/executor/ExecResult.java +++ b/systemtest/src/main/java/io/strimzi/test/executor/ExecResult.java @@ -6,6 +6,9 @@ import java.io.Serializable; +/** + * Result of an execution of an command + */ public class ExecResult implements Serializable { private static final long serialVersionUID = 1L; @@ -20,18 +23,30 @@ public class ExecResult implements Serializable { this.stdErr = stdErr; } + /** + * @return True if the command succeeded. False otherwise. + */ public boolean exitStatus() { return returnCode == 0; } + /** + * @return The command return code + */ public int returnCode() { return returnCode; } + /** + * @return The standard output of the command + */ public String out() { return stdOut; } + /** + * @return The error output of the command + */ public String err() { return stdErr; } diff --git a/systemtest/src/test/java/io/strimzi/systemtest/connect/ConnectST.java b/systemtest/src/test/java/io/strimzi/systemtest/connect/ConnectST.java index 31662cc3562..91652104018 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/connect/ConnectST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/connect/ConnectST.java @@ -64,6 +64,7 @@ import io.strimzi.systemtest.utils.kafkaUtils.KafkaConnectorUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.StrimziPodSetUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.vertx.core.json.JsonObject; import org.apache.logging.log4j.LogManager; @@ -141,7 +142,7 @@ void testDeployRollUndeploy() { 
RollingUpdateUtils.waitTillComponentHasRolled(testStorage.getNamespaceName(), testStorage.getKafkaConnectSelector(), connectReplicasCount, connectPodsSnapshot); final String podName = PodUtils.getPodNameByPrefix(testStorage.getNamespaceName(), KafkaConnectResources.componentName(testStorage.getClusterName())); - final String kafkaPodJson = TestUtils.toJsonString(kubeClient(testStorage.getNamespaceName()).getPod(podName)); + final String kafkaPodJson = ReadWriteUtils.writeObjectToJsonString(kubeClient(testStorage.getNamespaceName()).getPod(podName)); assertThat(kafkaPodJson, hasJsonPath(StUtils.globalVariableJsonPathBuilder(0, "KAFKA_CONNECT_BOOTSTRAP_SERVERS"), hasItem(KafkaResources.tlsBootstrapAddress(testStorage.getClusterName())))); diff --git a/systemtest/src/test/java/io/strimzi/systemtest/log/LoggingChangeST.java b/systemtest/src/test/java/io/strimzi/systemtest/log/LoggingChangeST.java index 6477a4ff000..93698e798a8 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/log/LoggingChangeST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/log/LoggingChangeST.java @@ -51,6 +51,7 @@ import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; @@ -1410,7 +1411,7 @@ && cmdKubeClient().namespace(testStorage.getNamespaceName()).execInPod(kafkaMM2P @ParallelNamespaceTest void testNotExistingCMSetsDefaultLogging() { final TestStorage testStorage = new TestStorage(ResourceManager.getTestContext()); - final String defaultProps = TestUtils.getFileAsString(TestUtils.USER_PATH + "/../cluster-operator/src/main/resources/default-logging/KafkaCluster.properties"); + final String defaultProps = ReadWriteUtils.readFile(TestUtils.USER_PATH + 
"/../cluster-operator/src/main/resources/default-logging/KafkaCluster.properties"); String cmData = "log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender\n" + "log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout\n" + diff --git a/systemtest/src/test/java/io/strimzi/systemtest/mirrormaker/MirrorMaker2ST.java b/systemtest/src/test/java/io/strimzi/systemtest/mirrormaker/MirrorMaker2ST.java index a079993d200..b961bc9325c 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/mirrormaker/MirrorMaker2ST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/mirrormaker/MirrorMaker2ST.java @@ -54,6 +54,7 @@ import io.strimzi.systemtest.utils.kubeUtils.controllers.StrimziPodSetUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.SecretUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -148,7 +149,7 @@ void testMirrorMaker2() { ClientUtils.waitForInstantClientSuccess(testStorage); String podName = PodUtils.getPodNameByPrefix(testStorage.getNamespaceName(), KafkaMirrorMaker2Resources.componentName(testStorage.getClusterName())); - String kafkaPodJson = TestUtils.toJsonString(kubeClient().getPod(testStorage.getNamespaceName(), podName)); + String kafkaPodJson = ReadWriteUtils.writeObjectToJsonString(kubeClient().getPod(testStorage.getNamespaceName(), podName)); assertThat(kafkaPodJson, hasJsonPath(StUtils.globalVariableJsonPathBuilder(0, "KAFKA_CONNECT_BOOTSTRAP_SERVERS"), hasItem(KafkaResources.plainBootstrapAddress(testStorage.getTargetClusterName())))); diff --git a/systemtest/src/test/java/io/strimzi/systemtest/operators/user/UserST.java b/systemtest/src/test/java/io/strimzi/systemtest/operators/user/UserST.java index 33cf335bdc3..61d8dc20fa7 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/operators/user/UserST.java +++ 
b/systemtest/src/test/java/io/strimzi/systemtest/operators/user/UserST.java @@ -40,6 +40,7 @@ import io.strimzi.systemtest.utils.kafkaUtils.KafkaUserUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.SecretUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -105,7 +106,7 @@ void testUpdateUser() { resourceManager.createResourceWithWait(KafkaUserTemplates.tlsUser(Environment.TEST_SUITE_NAMESPACE, testStorage.getKafkaUsername(), sharedTestStorage.getClusterName()).build()); - String kafkaUserSecret = TestUtils.toJsonString(kubeClient(Environment.TEST_SUITE_NAMESPACE).getSecret(testStorage.getKafkaUsername())); + String kafkaUserSecret = ReadWriteUtils.writeObjectToJsonString(kubeClient(Environment.TEST_SUITE_NAMESPACE).getSecret(testStorage.getKafkaUsername())); assertThat(kafkaUserSecret, hasJsonPath("$.data['ca.crt']", notNullValue())); assertThat(kafkaUserSecret, hasJsonPath("$.data['user.crt']", notNullValue())); assertThat(kafkaUserSecret, hasJsonPath("$.data['user.key']", notNullValue())); @@ -113,7 +114,7 @@ void testUpdateUser() { assertThat(kafkaUserSecret, hasJsonPath("$.metadata.namespace", equalTo(Environment.TEST_SUITE_NAMESPACE))); KafkaUser kUser = KafkaUserResource.kafkaUserClient().inNamespace(Environment.TEST_SUITE_NAMESPACE).withName(testStorage.getKafkaUsername()).get(); - String kafkaUserAsJson = TestUtils.toJsonString(kUser); + String kafkaUserAsJson = ReadWriteUtils.writeObjectToJsonString(kUser); assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.name", equalTo(testStorage.getKafkaUsername()))); assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.namespace", equalTo(Environment.TEST_SUITE_NAMESPACE))); @@ -136,12 +137,12 @@ void testUpdateUser() { KafkaUserUtils.waitForKafkaUserIncreaseObserverGeneration(Environment.TEST_SUITE_NAMESPACE, observedGeneration, 
testStorage.getKafkaUsername()); KafkaUserUtils.waitForKafkaUserCreation(Environment.TEST_SUITE_NAMESPACE, testStorage.getKafkaUsername()); - String anotherKafkaUserSecret = TestUtils.toJsonString(kubeClient(Environment.TEST_SUITE_NAMESPACE).getSecret(Environment.TEST_SUITE_NAMESPACE, testStorage.getKafkaUsername())); + String anotherKafkaUserSecret = ReadWriteUtils.writeObjectToJsonString(kubeClient(Environment.TEST_SUITE_NAMESPACE).getSecret(Environment.TEST_SUITE_NAMESPACE, testStorage.getKafkaUsername())); assertThat(anotherKafkaUserSecret, hasJsonPath("$.data.password", notNullValue())); kUser = Crds.kafkaUserOperation(kubeClient().getClient()).inNamespace(Environment.TEST_SUITE_NAMESPACE).withName(testStorage.getKafkaUsername()).get(); - kafkaUserAsJson = TestUtils.toJsonString(kUser); + kafkaUserAsJson = ReadWriteUtils.writeObjectToJsonString(kUser); assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.name", equalTo(testStorage.getKafkaUsername()))); assertThat(kafkaUserAsJson, hasJsonPath("$.metadata.namespace", equalTo(Environment.TEST_SUITE_NAMESPACE))); assertThat(kafkaUserAsJson, hasJsonPath("$.spec.authentication.type", equalTo("scram-sha-512"))); diff --git a/systemtest/src/test/java/io/strimzi/systemtest/security/SecurityST.java b/systemtest/src/test/java/io/strimzi/systemtest/security/SecurityST.java index 3907eb543ab..390eb2ece2b 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/security/SecurityST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/security/SecurityST.java @@ -57,6 +57,7 @@ import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.SecretUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import org.apache.kafka.common.config.SslConfigs; import org.apache.kafka.common.errors.GroupAuthorizationException; @@ -603,7 +604,7 @@ void 
testAutoRenewCaCertsTriggerByExpiredCertificate() { // 1. Create the Secrets already, and a certificate that's already expired InputStream secretInputStream = getClass().getClassLoader().getResourceAsStream("security-st-certs/expired-cluster-ca.crt"); - String clusterCaCert = TestUtils.readResource(secretInputStream); + String clusterCaCert = ReadWriteUtils.readInputStream(secretInputStream); SecretUtils.createSecret(testStorage.getNamespaceName(), clusterCaCertificateSecretName(testStorage.getClusterName()), "ca.crt", clusterCaCert); // 2. Now create a cluster diff --git a/systemtest/src/test/java/io/strimzi/systemtest/specific/SpecificST.java b/systemtest/src/test/java/io/strimzi/systemtest/specific/SpecificST.java index f6282ebdade..631882bdd78 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/specific/SpecificST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/specific/SpecificST.java @@ -24,7 +24,7 @@ import io.strimzi.systemtest.templates.crd.KafkaTemplates; import io.strimzi.systemtest.templates.kubernetes.ClusterRoleBindingTemplates; import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.hamcrest.CoreMatchers; @@ -53,27 +53,27 @@ void testClusterWideOperatorWithLimitedAccessToSpecificNamespaceViaRbacRole() { final String namespaceWhereCreationOfCustomResourcesIsApproved = "example-1"; // --- a) defining Role and ClusterRoles - final Role strimziClusterOperator020 = TestUtils.configFromYaml(SetupClusterOperator.getInstance().switchClusterRolesToRolesIfNeeded(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/020-ClusterRole-strimzi-cluster-operator-role.yaml"), true), Role.class); + final Role strimziClusterOperator020 = ReadWriteUtils.readObjectFromYamlFilepath(SetupClusterOperator.getInstance().switchClusterRolesToRolesIfNeeded(new 
File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/020-ClusterRole-strimzi-cluster-operator-role.yaml"), true), Role.class); // specify explicit namespace for Role (for ClusterRole we do not specify namespace because ClusterRole is a non-namespaced resource strimziClusterOperator020.getMetadata().setNamespace(namespaceWhereCreationOfCustomResourcesIsApproved); - final ClusterRole strimziClusterOperator021 = TestUtils.configFromYaml(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/021-ClusterRole-strimzi-cluster-operator-role.yaml"), ClusterRole.class); - final ClusterRole strimziClusterOperator022 = TestUtils.configFromYaml(SetupClusterOperator.getInstance().changeLeaseNameInResourceIfNeeded(new File( + final ClusterRole strimziClusterOperator021 = ReadWriteUtils.readObjectFromYamlFilepath(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/021-ClusterRole-strimzi-cluster-operator-role.yaml"), ClusterRole.class); + final ClusterRole strimziClusterOperator022 = ReadWriteUtils.readObjectFromYamlFilepath(SetupClusterOperator.getInstance().changeLeaseNameInResourceIfNeeded(new File( TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/022-ClusterRole-strimzi-cluster-operator-role.yaml").getAbsolutePath()), ClusterRole.class); - final ClusterRole strimziClusterOperator023 = TestUtils.configFromYaml(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/023-ClusterRole-strimzi-cluster-operator-role.yaml"), ClusterRole.class); - final ClusterRole strimziClusterOperator030 = TestUtils.configFromYaml(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/030-ClusterRole-strimzi-kafka-broker.yaml"), ClusterRole.class); - final ClusterRole strimziClusterOperator031 = TestUtils.configFromYaml(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/031-ClusterRole-strimzi-entity-operator.yaml"), ClusterRole.class); - final 
ClusterRole strimziClusterOperator033 = TestUtils.configFromYaml(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/033-ClusterRole-strimzi-kafka-client.yaml"), ClusterRole.class); + final ClusterRole strimziClusterOperator023 = ReadWriteUtils.readObjectFromYamlFilepath(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/023-ClusterRole-strimzi-cluster-operator-role.yaml"), ClusterRole.class); + final ClusterRole strimziClusterOperator030 = ReadWriteUtils.readObjectFromYamlFilepath(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/030-ClusterRole-strimzi-kafka-broker.yaml"), ClusterRole.class); + final ClusterRole strimziClusterOperator031 = ReadWriteUtils.readObjectFromYamlFilepath(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/031-ClusterRole-strimzi-entity-operator.yaml"), ClusterRole.class); + final ClusterRole strimziClusterOperator033 = ReadWriteUtils.readObjectFromYamlFilepath(new File(TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/033-ClusterRole-strimzi-kafka-client.yaml"), ClusterRole.class); final List roles = Arrays.asList(strimziClusterOperator020); final List clusterRoles = Arrays.asList(strimziClusterOperator021, strimziClusterOperator022, strimziClusterOperator023, strimziClusterOperator030, strimziClusterOperator031, strimziClusterOperator033); // ---- b) defining RoleBindings - final RoleBinding strimziClusterOperator020Namespaced = TestUtils.configFromYaml(SetupClusterOperator.getInstance().switchClusterRolesToRolesIfNeeded(new File( + final RoleBinding strimziClusterOperator020Namespaced = ReadWriteUtils.readObjectFromYamlFilepath(SetupClusterOperator.getInstance().switchClusterRolesToRolesIfNeeded(new File( TestConstants.PATH_TO_PACKAGING_INSTALL_FILES + "/cluster-operator/020-RoleBinding-strimzi-cluster-operator.yaml"), true), RoleBinding.class); - final RoleBinding strimziClusterOperator022LeaderElection = 
TestUtils.configFromYaml(SetupClusterOperator.getInstance().changeLeaseNameInResourceIfNeeded(new File( + final RoleBinding strimziClusterOperator022LeaderElection = ReadWriteUtils.readObjectFromYamlFilepath(SetupClusterOperator.getInstance().changeLeaseNameInResourceIfNeeded(new File( TestConstants.PATH_TO_LEASE_ROLE_BINDING).getAbsolutePath()), RoleBinding.class); // specify explicit namespace for RoleBindings diff --git a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java index e1426cf7dee..fd5946f052a 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java @@ -46,6 +46,7 @@ import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import io.strimzi.test.k8s.KubeClusterResource; import org.apache.logging.log4j.LogManager; @@ -166,7 +167,7 @@ protected void changeKafkaVersion(String componentsNamespaceName, CommonVersionM kafkaTopicYaml = new File(examplesPath + "/examples/topic/kafka-topic.yaml"); LOGGER.info("Deploying KafkaTopic from: {}", kafkaTopicYaml.getPath()); - cmdKubeClient(componentsNamespaceName).applyContent(TestUtils.readFile(kafkaTopicYaml)); + cmdKubeClient(componentsNamespaceName).applyContent(ReadWriteUtils.readFile(kafkaTopicYaml)); // ####################################################################### @@ -312,7 +313,7 @@ protected void modifyApplyClusterOperatorWithCRDsFromFile(String clusterOperator } else if (f.getName().matches(".*Deployment.*")) { cmdKubeClient(clusterOperatorNamespaceName).replaceContent(StUtils.changeDeploymentConfiguration(componentsNamespaceName, f, strimziFeatureGatesValue)); } else { 
- cmdKubeClient(clusterOperatorNamespaceName).replaceContent(TestUtils.getContent(f, TestUtils::toYamlString)); + cmdKubeClient(clusterOperatorNamespaceName).replaceContent(ReadWriteUtils.readFile(f)); } }); } @@ -412,7 +413,7 @@ protected void setupEnvAndUpgradeClusterOperator(String clusterOperatorNamespace String pathToTopicExamples = upgradeData.getFromExamples().equals("HEAD") ? PATH_TO_KAFKA_TOPIC_CONFIG : upgradeData.getFromExamples() + "/examples/topic/kafka-topic.yaml"; kafkaTopicYaml = new File(dir, pathToTopicExamples); - cmdKubeClient(testStorage.getNamespaceName()).applyContent(TestUtils.getContent(kafkaTopicYaml, TestUtils::toYamlString) + cmdKubeClient(testStorage.getNamespaceName()).applyContent(ReadWriteUtils.readFile(kafkaTopicYaml) .replace("name: \"my-topic\"", "name: \"" + testStorage.getTopicName() + "\"") .replace("partitions: 1", "partitions: 3") .replace("replicas: 1", "replicas: 3") + @@ -445,7 +446,7 @@ private String getKafkaYamlWithName(String name) { String initialName = "name: \"my-topic\""; String newName = "name: \"%s\"".formatted(name); - return TestUtils.getContent(kafkaTopicYaml, TestUtils::toYamlString).replace(initialName, newName); + return ReadWriteUtils.readFile(kafkaTopicYaml).replace(initialName, newName); } protected void verifyProcedure(String componentsNamespaceNames, BundleVersionModificationData upgradeData, String producerName, String consumerName, boolean wasUTOUsedBefore) { @@ -601,7 +602,7 @@ protected void deployKafkaConnectAndKafkaConnectorWithWaitForReadiness( final String imageFullPath = Environment.getImageOutputRegistry(testStorage.getNamespaceName(), TestConstants.ST_CONNECT_BUILD_IMAGE_NAME, String.valueOf(new Random().nextInt(Integer.MAX_VALUE))); - KafkaConnect kafkaConnect = new KafkaConnectBuilder(TestUtils.configFromYaml(kafkaConnectYaml, KafkaConnect.class)) + KafkaConnect kafkaConnect = new KafkaConnectBuilder(ReadWriteUtils.readObjectFromYamlFilepath(kafkaConnectYaml, KafkaConnect.class)) 
.editMetadata() .withName(clusterName) .addToAnnotations(Annotations.STRIMZI_IO_USE_CONNECTOR_RESOURCES, "true") @@ -621,7 +622,7 @@ protected void deployKafkaConnectAndKafkaConnectorWithWaitForReadiness( LOGGER.info("Deploying KafkaConnect from: {}", kafkaConnectYaml.getPath()); - cmdKubeClient(testStorage.getNamespaceName()).applyContent(TestUtils.toYamlString(kafkaConnect)); + cmdKubeClient(testStorage.getNamespaceName()).applyContent(ReadWriteUtils.writeObjectToYamlString(kafkaConnect)); ResourceManager.waitForResourceReadiness(testStorage.getNamespaceName(), getResourceApiVersion(KafkaConnect.RESOURCE_PLURAL), kafkaConnect.getMetadata().getName()); // in our examples is no sink connector and thus we are using the same as in HEAD verification diff --git a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/kraft/AbstractKRaftUpgradeST.java b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/kraft/AbstractKRaftUpgradeST.java index 334c86b93cb..f402a8a7a7e 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/kraft/AbstractKRaftUpgradeST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/kraft/AbstractKRaftUpgradeST.java @@ -22,7 +22,7 @@ import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; import io.strimzi.systemtest.utils.kubeUtils.controllers.DeploymentUtils; import io.strimzi.systemtest.utils.kubeUtils.objects.PodUtils; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.junit.jupiter.api.AfterEach; @@ -218,7 +218,7 @@ protected void applyCustomResourcesFromPath(String namespaceName, String example kafkaTopicYaml = new File(examplesPath + "/examples/topic/kafka-topic.yaml"); LOGGER.info("Deploying KafkaTopic from: {}, in Namespace {}", kafkaTopicYaml.getPath(), namespaceName); - cmdKubeClient(namespaceName).applyContent(TestUtils.readFile(kafkaTopicYaml)); + 
cmdKubeClient(namespaceName).applyContent(ReadWriteUtils.readFile(kafkaTopicYaml)); } @BeforeEach diff --git a/test/src/main/java/io/strimzi/test/CrdUtils.java b/test/src/main/java/io/strimzi/test/CrdUtils.java new file mode 100644 index 00000000000..b6b73f45b25 --- /dev/null +++ b/test/src/main/java/io/strimzi/test/CrdUtils.java @@ -0,0 +1,122 @@ +/* + * Copyright Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.strimzi.test; + +import io.fabric8.kubernetes.api.model.DeletionPropagation; +import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.client.KubernetesClient; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Class with methods and fields useful for testing CRD related things + */ +public final class CrdUtils { + /** + * Path to the KafkaTopic CRD definition YAML + */ + public static final String CRD_TOPIC = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/043-Crd-kafkatopic.yaml"; + + /** + * Path to the Kafka CRD definition YAML + */ + public static final String CRD_KAFKA = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/040-Crd-kafka.yaml"; + + /** + * Path to the KafkaConnect CRD definition YAML + */ + public static final String CRD_KAFKA_CONNECT = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/041-Crd-kafkaconnect.yaml"; + + /** + * Path to the KafkaUser CRD definition YAML + */ + public static final String CRD_KAFKA_USER = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/044-Crd-kafkauser.yaml"; + + /** + * Path to the KafkaMirrorMaker CRD definition YAML + */ + public static final String CRD_KAFKA_MIRROR_MAKER = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/045-Crd-kafkamirrormaker.yaml"; + + /** + * Path to the KafkaBridge CRD definition YAML + */ + public static final String CRD_KAFKA_BRIDGE = 
TestUtils.USER_PATH + "/../packaging/install/cluster-operator/046-Crd-kafkabridge.yaml"; + + /** + * Path to the KafkaMirrorMaker2 CRD definition YAML + */ + public static final String CRD_KAFKA_MIRROR_MAKER_2 = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/048-Crd-kafkamirrormaker2.yaml"; + + /** + * Path to the KafkaConnector CRD definition YAML + */ + public static final String CRD_KAFKA_CONNECTOR = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/047-Crd-kafkaconnector.yaml"; + + /** + * Path to the KafkaRebalance CRD definition YAML + */ + public static final String CRD_KAFKA_REBALANCE = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/049-Crd-kafkarebalance.yaml"; + + /** + * Path to the KafkaNodePool CRD definition YAML + */ + public static final String CRD_KAFKA_NODE_POOL = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/04A-Crd-kafkanodepool.yaml"; + + /** + * Path to the StrimziPodSet CRD definition YAML + */ + public static final String CRD_STRIMZI_POD_SET = TestUtils.USER_PATH + "/../packaging/install/cluster-operator/042-Crd-strimzipodset.yaml"; + + private CrdUtils() { } + + /** + * Creates a CRD resource in the Kubernetes cluster + * + * @param client Kubernetes client + * @param crdName Name of the CRD + * @param crdPath Path to the CRD YAML + */ + public static void createCrd(KubernetesClient client, String crdName, String crdPath) { + if (client.apiextensions().v1().customResourceDefinitions().withName(crdName).get() != null) { + deleteCrd(client, crdName); + } + + client.apiextensions().v1() + .customResourceDefinitions() + .load(crdPath) + .create(); + client.apiextensions().v1() + .customResourceDefinitions() + .load(crdPath) + .waitUntilCondition(CrdUtils::isCrdEstablished, 10, TimeUnit.SECONDS); + } + + /** + * Checks if the CRD has been established + * + * @param crd The CRD resource + * + * @return True if the CRD is established. False otherwise. 
+ */ + private static boolean isCrdEstablished(CustomResourceDefinition crd) { + return crd.getStatus() != null + && crd.getStatus().getConditions() != null + && crd.getStatus().getConditions().stream().anyMatch(c -> "Established".equals(c.getType()) && "True".equals(c.getStatus())); + } + + /** + * Deletes the CRD from the Kubernetes cluster + * + * @param client Kubernetes client + * @param crdName Name of the CRD + */ + public static void deleteCrd(KubernetesClient client, String crdName) { + if (client.apiextensions().v1().customResourceDefinitions().withName(crdName).get() != null) { + client.apiextensions().v1().customResourceDefinitions().withName(crdName).withPropagationPolicy(DeletionPropagation.BACKGROUND).delete(); + client.apiextensions().v1().customResourceDefinitions().withName(crdName).waitUntilCondition(Objects::isNull, 30_000, TimeUnit.MILLISECONDS); + } + } +} diff --git a/test/src/main/java/io/strimzi/test/ReadWriteUtils.java b/test/src/main/java/io/strimzi/test/ReadWriteUtils.java new file mode 100644 index 00000000000..6be5e193feb --- /dev/null +++ b/test/src/main/java/io/strimzi/test/ReadWriteUtils.java @@ -0,0 +1,318 @@ +/* + * Copyright Strimzi authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.strimzi.test; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.exc.InvalidFormatException; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; + +import static java.lang.String.format; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * Class with various utility methods for reading and writing files and objects + */ +public final class ReadWriteUtils { + private static final Logger LOGGER = LogManager.getLogger(ReadWriteUtils.class); + + private ReadWriteUtils() { + // All static methods + } + + /** + * Reads file from the disk and returns it as a String + * + * @param file The file that should be read + * + * @return String with file content + */ + public static String readFile(File file) { + try { + if (file == null) { + return null; + } else { + return Files.readString(file.toPath()); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Reads file from the disk and returns it as a String + * + * @param filePath Path to the file that should be read + * + * @return String with file content + 
*/ + public static String readFile(String filePath) { + return readFile(new File(filePath)); + } + + /** + * Read the classpath resource with the given resourceName and return the content as a String + * + * @param cls The class relative to which the resource will be loaded. + * @param resourceName The name of the file stored in resources + * + * @return The resource content + */ + public static String readFileFromResources(Class cls, String resourceName) { + try { + URL url = cls.getResource(resourceName); + if (url == null) { + return null; + } else { + return Files.readString(Paths.get( + url.toURI())); + } + } catch (IOException | URISyntaxException e) { + throw new RuntimeException(e); + } + } + + /** + * Reads an InputStream and returns its content as a String + * + * @param stream InputStream that should be read + * + * @return String with the InputStream content + */ + public static String readInputStream(InputStream stream) { + StringBuilder textBuilder = new StringBuilder(); + try (Reader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { + int character; + + while ((character = reader.read()) != -1) { + textBuilder.append((char) character); + } + } catch (IOException e) { + LOGGER.warn("Failed to read from InputStream", e); + } + + return textBuilder.toString(); + } + + /** + * Reads an object from a YAML file stored in resources + * + * @param resource The name of the file in resources + * @param c The class from which resource path should be the file loaded + * + * @return An object instance read from the file + * + * @param Type of the object + */ + public static T readObjectFromYamlFileInResources(String resource, Class c) { + return readObjectFromYamlFileInResources(resource, c, false); + } + + /** + * Reads an object from a YAML file stored in resources + * + * @param resource The name of the file in resources + * @param c The class from which resource path should be the file loaded + * @param 
ignoreUnknownProperties Defines whether unknown properties should be ignored or if this method should + * fail with an exception + * + * @return An object instance from the file + * + * @param Type of the resource + */ + public static T readObjectFromYamlFileInResources(String resource, Class c, boolean ignoreUnknownProperties) { + URL url = c.getResource(resource); + if (url == null) { + return null; + } + ObjectMapper mapper = new YAMLMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, !ignoreUnknownProperties); + try { + return mapper.readValue(url, c); + } catch (InvalidFormatException e) { + throw new IllegalArgumentException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Reads an object from YAML string + * + * @param yamlContent String with the YAML of the object + * @param c The class representing the object + * + * @return Returns the object instance based on the YAML + * + * @param Type of the object + */ + public static T readObjectFromYamlString(String yamlContent, Class c) { + try { + ObjectMapper mapper = new YAMLMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); + return mapper.readValue(yamlContent, c); + } catch (InvalidFormatException e) { + throw new IllegalArgumentException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Reads an object from a YAML file + * + * @param yamlFile File with the YAML + * @param c The class representing the object + * + * @return Returns the object instance based on the YAML file + * + * @param Type of the object + */ + public static T readObjectFromYamlFilepath(File yamlFile, Class c) { + try { + ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); + return mapper.readValue(yamlFile, c); + } catch (InvalidFormatException e) { + throw new IllegalArgumentException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Reads an object from a path to a YAML file + * + 
* @param yamlPath Path to a YAML file + * @param c The class representing the object + * + * @return Returns the object instance based on the YAML file path + * + * @param Type of the object + */ + public static T readObjectFromYamlFilepath(String yamlPath, Class c) { + return readObjectFromYamlFilepath(new File(yamlPath), c); + } + + /** + * Converts an object into YAML + * + * @param instance The resource that should be converted to YAML + * + * @return String with the YAML representation of the object + * + * @param Type of the object + */ + public static String writeObjectToYamlString(T instance) { + try { + ObjectMapper mapper = new YAMLMapper() + .disable(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID) + .setSerializationInclusion(JsonInclude.Include.NON_EMPTY); + return mapper.writeValueAsString(instance); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + /** + * Converts an object into YAML + * + * @param instance The resource that should be converted to YAML + * + * @return String with the YAML representation of the object + * + * @param Type of the object + */ + public static String writeObjectToJsonString(T instance) { + try { + ObjectMapper mapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + return mapper.writeValueAsString(instance); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + /** + * Writes text into a file + * + * @param filePath Path of the file where the text will be written + * @param text The text that will be written into the file + */ + public static void writeFile(Path filePath, String text) { + try { + Files.writeString(filePath, text, StandardCharsets.UTF_8); + } catch (IOException e) { + LOGGER.warn("Exception during writing text in file", e); + } + } + + /** + * Creates an empty file in the default temporary-file directory, using the given prefix and suffix. + * + * @param prefix The prefix of the empty file (default: UUID). 
+ * @param suffix The suffix of the empty file (default: .tmp). + * + * @return The empty file just created. + */ + public static File tempFile(String prefix, String suffix) { + File file; + prefix = prefix == null ? UUID.randomUUID().toString() : prefix; + suffix = suffix == null ? ".tmp" : suffix; + try { + file = Files.createTempFile(prefix, suffix).toFile(); + } catch (IOException e) { + throw new RuntimeException(e); + } + file.deleteOnExit(); + return file; + } + + /** + * Get JSON content as string from resource file. + * + * TODO: Does the special handling here really matter? Can't we just use readFileFromResources? + * + * @param resourcePath Resource path. + * + * @return JSON content as string. + */ + public static String readSingleLineJsonStringFromResourceFile(String resourcePath) { + try { + URI resourceURI = Objects.requireNonNull(TestUtils.class.getClassLoader().getResource(resourcePath)).toURI(); + try (Stream lines = Files.lines(Paths.get(resourceURI), UTF_8)) { + Optional content = lines.reduce((x, y) -> x + y); + + if (content.isEmpty()) { + throw new IOException(format("File %s from resources was empty", resourcePath)); + } + + return content.get(); + } + } catch (Throwable t) { + throw new RuntimeException(t); + } + } +} diff --git a/test/src/main/java/io/strimzi/test/TestUtils.java b/test/src/main/java/io/strimzi/test/TestUtils.java index 659768a67ce..a954d5487e2 100644 --- a/test/src/main/java/io/strimzi/test/TestUtils.java +++ b/test/src/main/java/io/strimzi/test/TestUtils.java @@ -4,141 +4,90 @@ */ package io.strimzi.test; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.exc.InvalidFormatException; import 
com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLParser; -import io.fabric8.kubernetes.api.model.ConfigMap; import io.fabric8.kubernetes.api.model.DeletionPropagation; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.NamespaceBuilder; import io.fabric8.kubernetes.api.model.OwnerReference; -import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; import io.fabric8.kubernetes.client.KubernetesClient; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; import java.io.PrintWriter; -import java.io.Reader; import java.io.StringWriter; import java.net.ServerSocket; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; import java.time.Duration; -import java.util.AbstractMap; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.StringTokenizer; -import java.util.UUID; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.LockSupport; import java.util.function.BooleanSupplier; -import java.util.function.Function; -import java.util.stream.Stream; -import static java.lang.String.format; 
-import static java.nio.charset.StandardCharsets.UTF_8; import static java.util.Arrays.asList; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.nullValue; -@SuppressWarnings({"checkstyle:ClassFanOutComplexity"}) +/** + * Class with various utility methods and fields useful for testing + */ public final class TestUtils { - private static final Logger LOGGER = LogManager.getLogger(TestUtils.class); - public static final String USER_PATH = System.getProperty("user.dir"); - - public static final String LINE_SEPARATOR = System.lineSeparator(); - - public static final String CRD_TOPIC = USER_PATH + "/../packaging/install/cluster-operator/043-Crd-kafkatopic.yaml"; - - public static final String CRD_KAFKA = USER_PATH + "/../packaging/install/cluster-operator/040-Crd-kafka.yaml"; - - public static final String CRD_KAFKA_CONNECT = USER_PATH + "/../packaging/install/cluster-operator/041-Crd-kafkaconnect.yaml"; - - public static final String CRD_KAFKA_USER = USER_PATH + "/../packaging/install/cluster-operator/044-Crd-kafkauser.yaml"; - - public static final String CRD_KAFKA_MIRROR_MAKER = USER_PATH + "/../packaging/install/cluster-operator/045-Crd-kafkamirrormaker.yaml"; - - public static final String CRD_KAFKA_BRIDGE = USER_PATH + "/../packaging/install/cluster-operator/046-Crd-kafkabridge.yaml"; - - public static final String CRD_KAFKA_MIRROR_MAKER_2 = USER_PATH + "/../packaging/install/cluster-operator/048-Crd-kafkamirrormaker2.yaml"; - - public static final String CRD_KAFKA_CONNECTOR = USER_PATH + "/../packaging/install/cluster-operator/047-Crd-kafkaconnector.yaml"; - - public static final String CRD_KAFKA_REBALANCE = USER_PATH + "/../packaging/install/cluster-operator/049-Crd-kafkarebalance.yaml"; - - public static final String CRD_KAFKA_NODE_POOL = USER_PATH + "/../packaging/install/cluster-operator/04A-Crd-kafkanodepool.yaml"; - - public static final String CRD_STRIMZI_POD_SET = 
USER_PATH + "/../packaging/install/cluster-operator/042-Crd-strimzipodset.yaml"; - /** - * Default timeout for asynchronous tests. + * Path to the user directory from which the tests are run */ - public static final int DEFAULT_TIMEOUT_DURATION = 30; + public static final String USER_PATH = System.getProperty("user.dir"); /** - * Default timeout unit for asynchronous tests. + * The default line separator for the platform where the tests are run */ - public static final TimeUnit DEFAULT_TIMEOUT_UNIT = TimeUnit.SECONDS; + public static final String LINE_SEPARATOR = System.lineSeparator(); private TestUtils() { // All static methods } - /** Returns a Map of the given sequence of key, value pairs. */ - @SafeVarargs - public static Map map(T... pairs) { - if (pairs.length % 2 != 0) { - throw new IllegalArgumentException(); - } - Map result = new HashMap<>(pairs.length / 2); - for (int i = 0; i < pairs.length; i += 2) { - result.put(pairs[i], pairs[i + 1]); - } - return result; - } - /** * Poll the given {@code ready} function every {@code pollIntervalMs} milliseconds until it returns true, * or throw a WaitException if it doesn't return true within {@code timeoutMs} milliseconds. 
- * @return The remaining time left until timeout occurs - * (helpful if you have several calls which need to share a common timeout), - * */ + * + * @param description Description of what we are waiting for (used for logging purposes) + * @param pollIntervalMs Poll interval in milliseconds + * @param timeoutMs Timeout interval in milliseconds + * @param ready Supplier to decide if the wait is complete or not + * + * @return The remaining time left until timeout occurs (helpful if you have several calls which need to share a common timeout) + */ public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready) { return waitFor(description, pollIntervalMs, timeoutMs, ready, () -> { }); } + /** + * Poll the given {@code ready} function every {@code pollIntervalMs} milliseconds until it returns true, + * or throw a WaitException if it doesn't return true within {@code timeoutMs} milliseconds. + * + * @param description Description of what we are waiting for (used for logging purposes) + * @param pollIntervalMs Poll interval in milliseconds + * @param timeoutMs Timeout interval in milliseconds + * @param ready Supplier to decide if the wait is complete or not + * @param onTimeout Runnable that will be run when the timeout is reached + * + * @return The remaining time left until timeout occurs (helpful if you have several calls which need to share a common timeout) + */ public static long waitFor(String description, long pollIntervalMs, long timeoutMs, BooleanSupplier ready, Runnable onTimeout) { LOGGER.debug("Waiting for {}", description); long deadline = System.currentTimeMillis() + timeoutMs; @@ -195,196 +144,79 @@ public static long waitFor(String description, long pollIntervalMs, long timeout } } - public static String indent(String s) { + /** + * Indents the input string with four empty spaces at the beginning of each line. + * + * TODO: This is used only in system tests and should be moved there. 
+ * + * @param input Input string that should be indented + * + * @return Indented string + */ + public static String indent(String input) { StringBuilder sb = new StringBuilder(); - String[] lines = s.split("[\n\r]"); + String[] lines = input.split("[\n\r]"); + for (String line : lines) { sb.append(" ").append(line).append(System.lineSeparator()); } - return sb.toString(); - } - public static String getFileAsString(String filePath) { - try { - LOGGER.info(filePath); - return Files.readString(Paths.get(filePath)); - } catch (IOException e) { - LOGGER.info("File with path {} not found", filePath); - } - return ""; + return sb.toString(); } /** - * Read the classpath resource with the given resourceName and return the content as a String - * @param cls The class relative to which the resource will be loaded. - * @param resourceName The name of the resource - * @return The resource content + * Creates a modifiable set with the desired elements. Use {@code Set.of()} if immutable set is sufficient. + * + * @param elements The elements that will be added to the Set + * + * @return Modifiable set with the elements + * + * @param Type of the elements stored in the Set */ - public static String readResource(Class cls, String resourceName) { - try { - URL url = cls.getResource(resourceName); - if (url == null) { - return null; - } else { - return Files.readString(Paths.get( - url.toURI())); - } - } catch (IOException | URISyntaxException e) { - throw new RuntimeException(e); - } + @SafeVarargs + public static Set modifiableSet(T... elements) { + return new HashSet<>(asList(elements)); } /** - * Read loaded resource as an InputStream and return the content as a String - * @param stream Loaded resource - * @return The resource content + * Creates a modifiable map with the desired elements. Use {@code Map.of()} if immutable map is sufficient. 
+ * + * @param pairs The key-value pairs that should be added to the Map + * + * @return Modifiable map with the desired key-value pairs + * + * @param Type of the keys and values */ - public static String readResource(InputStream stream) { - StringBuilder textBuilder = new StringBuilder(); - try (Reader reader = new BufferedReader(new InputStreamReader( - stream, StandardCharsets.UTF_8) - )) { - int character; - while ((character = reader.read()) != -1) { - textBuilder.append((char) character); - } - } catch (IOException e) { - LOGGER.warn("Failed to read from InputStream", e); - } - return textBuilder.toString(); - } - - public static String readFile(File file) { - try { - if (file == null) { - return null; - } else { - return Files.readString(file.toPath()); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - @SafeVarargs - public static Set set(T... elements) { - return new HashSet<>(asList(elements)); - } - - public static T fromYaml(String resource, Class c) { - return fromYaml(resource, c, false); - } - - public static T fromYaml(String resource, Class c, boolean ignoreUnknownProperties) { - URL url = c.getResource(resource); - if (url == null) { - return null; - } - ObjectMapper mapper = new YAMLMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, !ignoreUnknownProperties); - try { - return mapper.readValue(url, c); - } catch (InvalidFormatException e) { - throw new IllegalArgumentException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - public static T fromYamlString(String yamlContent, Class c) { - return fromYamlString(yamlContent, c, false); - } - - public static T fromYamlString(String yamlContent, Class c, boolean ignoreUnknownProperties) { - ObjectMapper mapper = new YAMLMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, !ignoreUnknownProperties); - try { - return mapper.readValue(yamlContent, c); - } catch (InvalidFormatException e) { - throw new 
IllegalArgumentException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - public static String toYamlString(T instance) { - ObjectMapper mapper = new YAMLMapper() - .disable(YAMLGenerator.Feature.USE_NATIVE_TYPE_ID) - .setSerializationInclusion(JsonInclude.Include.NON_EMPTY); - try { - return mapper.writeValueAsString(instance); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - public static T configFromYaml(String yamlPath, Class c) { - return configFromYaml(new File(yamlPath), c); - } + public static Map modifiableMap(T... pairs) { + if (pairs.length % 2 != 0) { + throw new IllegalArgumentException(); + } else { + Map result = new HashMap<>(pairs.length / 2); - public static ConfigMap configMapFromYaml(String yamlPath, String name) { - try { - YAMLFactory yaml = new YAMLFactory(); - ObjectMapper mapper = new ObjectMapper(yaml); - YAMLParser yamlParser = yaml.createParser(new File(yamlPath)); - List list = mapper.readValues(yamlParser, new TypeReference() { }).readAll(); - Optional cmOpt = list.stream().filter(cm -> "ConfigMap".equals(cm.getKind()) && name.equals(cm.getMetadata().getName())).findFirst(); - if (cmOpt.isPresent()) { - return cmOpt.get(); - } else { - LOGGER.warn("ConfigMap {} not found in file {}", name, yamlPath); - return null; + for (int i = 0; i < pairs.length; i += 2) { + result.put(pairs[i], pairs[i + 1]); } - } catch (InvalidFormatException e) { - throw new IllegalArgumentException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } - - } - public static T configFromYaml(File yamlFile, Class c) { - ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); - try { - return mapper.readValue(yamlFile, c); - } catch (InvalidFormatException e) { - throw new IllegalArgumentException(e); - } catch (IOException e) { - throw new RuntimeException(e); + return result; } } - public static String toJsonString(Object instance) { - ObjectMapper mapper = new ObjectMapper() - 
.setSerializationInclusion(JsonInclude.Include.NON_NULL); - try { - return mapper.writeValueAsString(instance); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - /** Map Streams utility methods */ - public static Map.Entry entry(K key, V value) { - return new AbstractMap.SimpleEntry<>(key, value); - } - - /** Method to create and write file */ - public static void writeFile(Path filePath, String text) { - try { - Files.writeString(filePath, text, StandardCharsets.UTF_8); - } catch (IOException e) { - LOGGER.warn("Exception during writing text in file", e); - } - } - - public static void checkOwnerReference(HasMetadata resource, HasMetadata parent) { + /** + * Checks that the resource has the owner reference pointing to the parent resource + * + * @param resource The resource where the owner reference should be checked + * @param owner The resource which should be the owner + */ + public static void checkOwnerReference(HasMetadata resource, HasMetadata owner) { assertThat(resource.getMetadata().getOwnerReferences().size(), is(1)); OwnerReference or = resource.getMetadata().getOwnerReferences().get(0); - assertThat(or.getApiVersion(), is(parent.getApiVersion())); - assertThat(or.getKind(), is(parent.getKind())); - assertThat(or.getName(), is(parent.getMetadata().getName())); - assertThat(or.getUid(), is(parent.getMetadata().getUid())); + assertThat(or.getApiVersion(), is(owner.getApiVersion())); + assertThat(or.getKind(), is(owner.getKind())); + assertThat(or.getName(), is(owner.getMetadata().getName())); + assertThat(or.getUid(), is(owner.getMetadata().getUid())); assertThat(or.getBlockOwnerDeletion(), is(false)); assertThat(or.getController(), is(false)); } @@ -393,6 +225,8 @@ public static void checkOwnerReference(HasMetadata resource, HasMetadata parent) * Changes the {@code subject} of the RoleBinding in the given YAML resource to be the * {@code strimzi-cluster-operator} {@code ServiceAccount} in the given namespace. 
* + * TODO: This is used only in system tests and should be moved there. + * * @param roleBindingFile The RoleBinding YAML file to load and change * @param namespace Namespace of the service account which should be the subject of this RoleBinding * @@ -413,19 +247,18 @@ public static String changeRoleBindingSubject(File roleBindingFile, String names } } - public static String getContent(File file, Function edit) { - YAMLMapper mapper = new YAMLMapper(); - try { - JsonNode node = mapper.readTree(file); - return edit.apply(node); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - public static Map parseImageMap(String str) { - if (str != null) { - StringTokenizer tok = new StringTokenizer(str, ", \t\n\r"); + /** + * Parse the image map String into a Map. + * + * TODO: This is used only in system tests and should be moved there. + * + * @param imageMapString String with the image map (contains key-value pairs separated by new lines in a single string) + * + * @return Map with the parsed images + */ + public static Map parseImageMap(String imageMapString) { + if (imageMapString != null) { + StringTokenizer tok = new StringTokenizer(imageMapString, ", \t\n\r"); HashMap map = new HashMap<>(); while (tok.hasMoreTokens()) { String versionImage = tok.nextToken(); @@ -456,18 +289,26 @@ public static int getFreePort() { /** * Awaits completion of the given stage using the default timeout. * - * @param stage the stage to await completion + * @param stage The stage to await completion + * + * @return Result of the completion stage + * + * @param Type of the completion stage result */ public static T await(CompletionStage stage) { - return await(stage, DEFAULT_TIMEOUT_DURATION, DEFAULT_TIMEOUT_UNIT); + return await(stage, 30, TimeUnit.SECONDS); } /** * Awaits completion of the given stage using the given timeout and unit. 
* - * @param stage the stage to await completion - * @param timeout the amount of time to wait for completion - * @param unit the unit of time give by the timeout parameter + * @param stage The stage to await completion + * @param timeout The amount of time to wait for completion + * @param unit The unit of time given by the timeout parameter + * + * @return Result of the completion stage + * + * @param Type of the completion stage result */ public static T await(CompletionStage stage, long timeout, TimeUnit unit) { try { @@ -489,108 +330,16 @@ public static T await(CompletionStage stage, long timeout, TimeUnit unit) * CompletionStage#whenComplete} to easily assert success without modifying the * result. * - * @param unused the result of a completion stage, unused by this method - * @param error an error thrown by an earlier completion stage + * @param unused The result of a completion stage, unused by this method + * @param error An error thrown by an earlier completion stage */ + @SuppressWarnings("unused") public static void assertSuccessful(Object unused, Throwable error) { assertThat(error, is(nullValue())); } /** - * Creates an empty file in the default temporary-file directory, using the given prefix and suffix. - * - * @param prefix The prefix of the empty file (default: UUID). - * @param suffix The suffix of the empty file (default: .tmp). - * - * @return The empty file just created. - */ - public static File tempFile(String prefix, String suffix) { - File file; - prefix = prefix == null ? UUID.randomUUID().toString() : prefix; - suffix = suffix == null ? ".tmp" : suffix; - try { - file = Files.createTempFile(prefix, suffix).toFile(); - } catch (IOException e) { - throw new RuntimeException(e); - } - file.deleteOnExit(); - return file; - } - - /** - * Get JSON content as string from resource. - * - * @param resourcePath Resource path. - * - * @return JSON content as string. 
- */ - public static String jsonFromResource(String resourcePath) { - try { - URI resourceURI = Objects.requireNonNull(TestUtils.class.getClassLoader().getResource(resourcePath)).toURI(); - try (Stream lines = Files.lines(Paths.get(resourceURI), UTF_8)) { - Optional content = lines.reduce((x, y) -> x + y); - - if (content.isEmpty()) { - throw new IOException(format("File %s from resources was empty", resourcePath)); - } - - return content.get(); - } - } catch (Throwable t) { - throw new RuntimeException(t); - } - } - - /** - * Creates a CRD resource in the Kubernetes cluster - * - * @param client Kubernetes client - * @param crdName Name of the CRD - * @param crdPath Path to the CRD YAML - */ - public static void createCrd(KubernetesClient client, String crdName, String crdPath) { - if (client.apiextensions().v1().customResourceDefinitions().withName(crdName).get() != null) { - deleteCrd(client, crdName); - } - - client.apiextensions().v1() - .customResourceDefinitions() - .load(crdPath) - .create(); - client.apiextensions().v1() - .customResourceDefinitions() - .load(crdPath) - .waitUntilCondition(TestUtils::isCrdEstablished, 10, TimeUnit.SECONDS); - } - - /** - * Checks if the CRD has been established - * - * @param crd The CRD resource - * - * @return True if the CRD is established. False otherwise. 
- */ - private static boolean isCrdEstablished(CustomResourceDefinition crd) { - return crd.getStatus() != null - && crd.getStatus().getConditions() != null - && crd.getStatus().getConditions().stream().anyMatch(c -> "Established".equals(c.getType()) && "True".equals(c.getStatus())); - } - - /** - * Deletes the CRD from the Kubernetes cluster - * - * @param client Kubernetes client - * @param crdName Name of the CRD - */ - public static void deleteCrd(KubernetesClient client, String crdName) { - if (client.apiextensions().v1().customResourceDefinitions().withName(crdName).get() != null) { - client.apiextensions().v1().customResourceDefinitions().withName(crdName).withPropagationPolicy(DeletionPropagation.BACKGROUND).delete(); - client.apiextensions().v1().customResourceDefinitions().withName(crdName).waitUntilCondition(Objects::isNull, 30_000, TimeUnit.MILLISECONDS); - } - } - - /** - * Creates the namespase. If the namespace already exists, it will delete it and recreate it. + * Creates the namespace. If the namespace already exists, it will delete it and recreate it. 
* * @param client Kubernetes client * @param namespace Namespace diff --git a/test/src/main/java/io/strimzi/test/WaitException.java b/test/src/main/java/io/strimzi/test/WaitException.java index e363d8a8c78..ef239c1833a 100644 --- a/test/src/main/java/io/strimzi/test/WaitException.java +++ b/test/src/main/java/io/strimzi/test/WaitException.java @@ -4,11 +4,24 @@ */ package io.strimzi.test; +/** + * Exception used to indicate that waiting for an event failed + */ public class WaitException extends RuntimeException { + /** + * Constructor + * + * @param message Exception message + */ public WaitException(String message) { super(message); } + /** + * Constructor + * + * @param cause Cause of the exception + */ public WaitException(Throwable cause) { super(cause); } diff --git a/test/src/main/java/io/strimzi/test/annotations/IsolatedTest.java b/test/src/main/java/io/strimzi/test/annotations/IsolatedTest.java index a9a2dacb87d..e82171f1625 100644 --- a/test/src/main/java/io/strimzi/test/annotations/IsolatedTest.java +++ b/test/src/main/java/io/strimzi/test/annotations/IsolatedTest.java @@ -26,5 +26,8 @@ @ResourceLock(mode = ResourceAccessMode.READ_WRITE, value = "global") @Test public @interface IsolatedTest { + /** + * @return The description of why this test needs to run as isolated + */ String value() default ""; } diff --git a/test/src/main/java/io/strimzi/test/interfaces/ExtensionContextParameterResolver.java b/test/src/main/java/io/strimzi/test/interfaces/ExtensionContextParameterResolver.java index a3fa453d37f..2abb98e4b14 100644 --- a/test/src/main/java/io/strimzi/test/interfaces/ExtensionContextParameterResolver.java +++ b/test/src/main/java/io/strimzi/test/interfaces/ExtensionContextParameterResolver.java @@ -9,6 +9,9 @@ import org.junit.jupiter.api.extension.ParameterResolutionException; import org.junit.jupiter.api.extension.ParameterResolver; +/** + * Extension context parameter resolver used in the Test separator + */ public class 
ExtensionContextParameterResolver implements ParameterResolver { @Override public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException { diff --git a/test/src/main/java/io/strimzi/test/interfaces/TestSeparator.java b/test/src/main/java/io/strimzi/test/interfaces/TestSeparator.java index 31e69af3757..498f0e824e5 100644 --- a/test/src/main/java/io/strimzi/test/interfaces/TestSeparator.java +++ b/test/src/main/java/io/strimzi/test/interfaces/TestSeparator.java @@ -13,17 +13,36 @@ import java.util.Collections; +/** + * Separates different tests in the log output + */ @ExtendWith(ExtensionContextParameterResolver.class) public interface TestSeparator { + /** + * Logger used to log the separator message + */ Logger LOGGER = LogManager.getLogger(TestSeparator.class); + /** + * Separator character used in the log output + */ String SEPARATOR_CHAR = "#"; + /** + * Prints the separator at the start of the test + * + * @param testContext Test context + */ @BeforeEach default void beforeEachTest(ExtensionContext testContext) { LOGGER.info(String.join("", Collections.nCopies(76, SEPARATOR_CHAR))); LOGGER.info(String.format("%s.%s-STARTED", testContext.getRequiredTestClass().getName(), testContext.getRequiredTestMethod().getName())); } + /** + * Prints the separator at the end of the test + * + * @param testContext Test context + */ @AfterEach default void afterEachTest(ExtensionContext testContext) { LOGGER.info(String.format("%s.%s-FINISHED", testContext.getRequiredTestClass().getName(), testContext.getRequiredTestMethod().getName())); diff --git a/topic-operator/src/test/java/io/strimzi/operator/topic/TopicOperatorTestUtil.java b/topic-operator/src/test/java/io/strimzi/operator/topic/TopicOperatorTestUtil.java index 8f81728b80c..4d8ec591f6e 100644 --- a/topic-operator/src/test/java/io/strimzi/operator/topic/TopicOperatorTestUtil.java +++ 
b/topic-operator/src/test/java/io/strimzi/operator/topic/TopicOperatorTestUtil.java @@ -16,6 +16,7 @@ import io.strimzi.api.kafka.model.topic.KafkaTopic; import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.topic.model.ReconcilableTopic; +import io.strimzi.test.CrdUtils; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -52,7 +53,7 @@ public static void setupKubeCluster(KubernetesClient kubernetesClient, String na createOrReplace(kubernetesClient, "file://" + TestUtils.USER_PATH + "/../packaging/install/topic-operator/01-ServiceAccount-strimzi-topic-operator.yaml", namespace); createOrReplace(kubernetesClient, "file://" + TestUtils.USER_PATH + "/../packaging/install/topic-operator/02-Role-strimzi-topic-operator.yaml", namespace); createOrReplace(kubernetesClient, "file://" + TestUtils.USER_PATH + "/../packaging/install/topic-operator/03-RoleBinding-strimzi-topic-operator.yaml", namespace); - createOrReplace(kubernetesClient, "file://" + TestUtils.CRD_TOPIC); + createOrReplace(kubernetesClient, "file://" + CrdUtils.CRD_TOPIC); } private static void createOrReplace(KubernetesClient kubernetesClient, String resourcesPath) { diff --git a/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/CruiseControlHandlerTest.java b/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/CruiseControlHandlerTest.java index 6d5b9260d42..c80c46cc2fb 100644 --- a/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/CruiseControlHandlerTest.java +++ b/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/CruiseControlHandlerTest.java @@ -22,6 +22,7 @@ import io.strimzi.operator.topic.metrics.TopicOperatorMetricsProvider; import io.strimzi.operator.topic.model.ReconcilableTopic; import io.strimzi.operator.topic.model.Results; +import io.strimzi.test.ReadWriteUtils; import io.strimzi.test.TestUtils; import 
org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -61,15 +62,15 @@ public static void beforeAll() throws IOException { new TopicOperatorMetricsProvider(new SimpleMeterRegistry())); serverPort = TestUtils.getFreePort(); - File tlsKeyFile = TestUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".key"); - tlsCrtFile = TestUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".crt"); + File tlsKeyFile = ReadWriteUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".key"); + tlsCrtFile = ReadWriteUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".crt"); new MockCertManager().generateSelfSignedCert(tlsKeyFile, tlsCrtFile, new Subject.Builder().withCommonName("Trusted Test CA").build(), 365); - apiUserFile = TestUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".username"); + apiUserFile = ReadWriteUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".username"); try (PrintWriter out = new PrintWriter(apiUserFile.getAbsolutePath())) { out.print("topic-operator-admin"); } - apiPassFile = TestUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".password"); + apiPassFile = ReadWriteUtils.tempFile(CruiseControlHandlerTest.class.getSimpleName(), ".password"); try (PrintWriter out = new PrintWriter(apiPassFile.getAbsolutePath())) { out.print("changeit"); } diff --git a/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/MockCruiseControl.java b/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/MockCruiseControl.java index 9b2b88024ef..93bcd04a030 100644 --- a/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/MockCruiseControl.java +++ b/topic-operator/src/test/java/io/strimzi/operator/topic/cruisecontrol/MockCruiseControl.java @@ -8,7 +8,7 @@ import io.strimzi.operator.common.model.cruisecontrol.CruiseControlEndpoints; import io.strimzi.operator.common.model.cruisecontrol.CruiseControlParameters; import 
io.strimzi.operator.topic.TopicOperatorTestUtil; -import io.strimzi.test.TestUtils; +import io.strimzi.test.ReadWriteUtils; import org.mockserver.configuration.ConfigurationProperties; import org.mockserver.integration.ClientAndServer; import org.mockserver.model.Header; @@ -87,7 +87,7 @@ public void expectTopicConfigSuccessResponse(File apiUserFile, File apiPassFile) .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/topic-config-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/topic-config-success.json"))) .withHeader(Header.header("User-Task-ID", "8911ca89-351f-888-8d0f-9aade00e098h")) .withDelay(TimeUnit.SECONDS, 0)); @@ -108,7 +108,7 @@ public void expectTopicConfigSuccessResponse(File apiUserFile, File apiPassFile) .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/topic-config-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/topic-config-success.json"))) .withHeader(Header.header("User-Task-ID", "8911ca89-351f-888-8d0f-9aade00e098h")) .withDelay(TimeUnit.SECONDS, 0)); @@ -126,7 +126,7 @@ public void expectTopicConfigSuccessResponse(File apiUserFile, File apiPassFile) .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/topic-config-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/topic-config-success.json"))) .withHeader(Header.header("User-Task-ID", "8911ca89-351f-888-8d0f-9aade00e098h")) .withDelay(TimeUnit.SECONDS, 0)); @@ -146,7 +146,7 @@ public void expectTopicConfigSuccessResponse(File apiUserFile, File apiPassFile) .respond( HttpResponse.response() 
.withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/topic-config-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/topic-config-success.json"))) .withHeader(Header.header("User-Task-ID", "8911ca89-351f-888-8d0f-9aade00e098h")) .withDelay(TimeUnit.SECONDS, 0)); } @@ -168,7 +168,7 @@ public void expectTopicConfigErrorResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.INTERNAL_SERVER_ERROR_500.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/topic-config-failure.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/topic-config-failure.json"))) .withHeader(Header.header("User-Task-ID", "8911ca89-351f-888-8d0f-9aade00e098h")) .withDelay(TimeUnit.SECONDS, 0)); } @@ -225,7 +225,7 @@ public void expectUserTasksSuccessResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/user-tasks-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/user-tasks-success.json"))) .withDelay(TimeUnit.SECONDS, 0)); // encryption and authentication enabled @@ -243,7 +243,7 @@ public void expectUserTasksSuccessResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/user-tasks-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/user-tasks-success.json"))) .withDelay(TimeUnit.SECONDS, 0)); // encryption only @@ -258,7 +258,7 @@ public void expectUserTasksSuccessResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() 
.withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/user-tasks-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/user-tasks-success.json"))) .withDelay(TimeUnit.SECONDS, 0)); // authentication only @@ -275,7 +275,7 @@ public void expectUserTasksSuccessResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.OK_200.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/user-tasks-success.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/user-tasks-success.json"))) .withDelay(TimeUnit.SECONDS, 0)); } @@ -294,7 +294,7 @@ public void expectUserTasksErrorResponse(File apiUserFile, File apiPassFile) { .respond( HttpResponse.response() .withStatusCode(HttpStatusCode.INTERNAL_SERVER_ERROR_500.code()) - .withBody(new JsonBody(TestUtils.jsonFromResource("cruisecontrol/user-tasks-failure.json"))) + .withBody(new JsonBody(ReadWriteUtils.readSingleLineJsonStringFromResourceFile("cruisecontrol/user-tasks-failure.json"))) .withDelay(TimeUnit.SECONDS, 0)); } diff --git a/user-operator/src/test/java/io/strimzi/operator/user/model/KafkaUserModelTest.java b/user-operator/src/test/java/io/strimzi/operator/user/model/KafkaUserModelTest.java index 56bb378cd10..f1888c60292 100644 --- a/user-operator/src/test/java/io/strimzi/operator/user/model/KafkaUserModelTest.java +++ b/user-operator/src/test/java/io/strimzi/operator/user/model/KafkaUserModelTest.java @@ -31,8 +31,8 @@ import java.util.Base64; import java.util.HashSet; import java.util.Map; +import java.util.Set; -import static io.strimzi.test.TestUtils.set; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.CoreMatchers.hasItem; @@ -141,7 +141,7 @@ public void testGenerateSecret() { 
model.maybeGenerateCertificates(Reconciliation.DUMMY_RECONCILIATION, mockCertManager, passwordGenerator, clientsCaCert, clientsCaKey, null, 365, 30, null, Clock.systemUTC()); Secret generatedSecret = model.generateSecret(); - assertThat(generatedSecret.getData().keySet(), is(set("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); + assertThat(generatedSecret.getData().keySet(), is(Set.of("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); assertThat(generatedSecret.getMetadata().getName(), is(ResourceUtils.NAME)); assertThat(generatedSecret.getMetadata().getNamespace(), is(ResourceUtils.NAMESPACE)); @@ -165,7 +165,7 @@ public void testGenerateSecretWithPrefix() { model.maybeGenerateCertificates(Reconciliation.DUMMY_RECONCILIATION, mockCertManager, passwordGenerator, clientsCaCert, clientsCaKey, null, 365, 30, null, Clock.systemUTC()); Secret generatedSecret = model.generateSecret(); - assertThat(generatedSecret.getData().keySet(), is(set("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); + assertThat(generatedSecret.getData().keySet(), is(Set.of("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); assertThat(generatedSecret.getMetadata().getName(), is(secretPrefix + ResourceUtils.NAME)); assertThat(generatedSecret.getMetadata().getNamespace(), is(ResourceUtils.NAMESPACE)); @@ -202,7 +202,7 @@ public void testGenerateSecretWithMetadataOverrides() { model.maybeGenerateCertificates(Reconciliation.DUMMY_RECONCILIATION, mockCertManager, passwordGenerator, clientsCaCert, clientsCaKey, null, 365, 30, null, Clock.systemUTC()); Secret generatedSecret = model.generateSecret(); - assertThat(generatedSecret.getData().keySet(), is(set("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); + assertThat(generatedSecret.getData().keySet(), is(Set.of("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); assertThat(generatedSecret.getMetadata().getName(), is(ResourceUtils.NAME)); 
assertThat(generatedSecret.getMetadata().getNamespace(), is(ResourceUtils.NAMESPACE)); @@ -341,7 +341,7 @@ public void testGenerateSecretGeneratesKeyStoreWhenOldVersionSecretExists() { model.maybeGenerateCertificates(Reconciliation.DUMMY_RECONCILIATION, mockCertManager, passwordGenerator, clientsCaCert, clientsCaKey, oldSecret, 365, 30, null, Clock.systemUTC()); Secret generatedSecret = model.generateSecret(); - assertThat(generatedSecret.getData().keySet(), is(set("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); + assertThat(generatedSecret.getData().keySet(), is(Set.of("ca.crt", "user.crt", "user.key", "user.p12", "user.password"))); assertThat(Util.decodeFromBase64(generatedSecret.getData().get("ca.crt")), is("clients-ca-crt")); assertThat(Util.decodeFromBase64(generatedSecret.getData().get("user.crt")), is(MockCertManager.userCert())); From 6d5551adcd5ebefbdc65010e6dd9e6b20d84d741 Mon Sep 17 00:00:00 2001 From: Jakub Scholz Date: Sat, 7 Sep 2024 22:25:46 +0200 Subject: [PATCH 2/3] Fix topic name manipulation Signed-off-by: Jakub Scholz --- .../io/strimzi/systemtest/upgrade/AbstractUpgradeST.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java index fd5946f052a..acb98d35a17 100644 --- a/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java +++ b/systemtest/src/test/java/io/strimzi/systemtest/upgrade/AbstractUpgradeST.java @@ -414,7 +414,7 @@ protected void setupEnvAndUpgradeClusterOperator(String clusterOperatorNamespace kafkaTopicYaml = new File(dir, pathToTopicExamples); cmdKubeClient(testStorage.getNamespaceName()).applyContent(ReadWriteUtils.readFile(kafkaTopicYaml) - .replace("name: \"my-topic\"", "name: \"" + testStorage.getTopicName() + "\"") + .replace("name: my-topic", "name: " + testStorage.getTopicName()) .replace("partitions: 1", 
"partitions: 3") .replace("replicas: 1", "replicas: 3") + " min.insync.replicas: 2"); @@ -443,8 +443,8 @@ protected void setupEnvAndUpgradeClusterOperator(String clusterOperatorNamespace } private String getKafkaYamlWithName(String name) { - String initialName = "name: \"my-topic\""; - String newName = "name: \"%s\"".formatted(name); + String initialName = "name: my-topic"; + String newName = "name: %s".formatted(name); return ReadWriteUtils.readFile(kafkaTopicYaml).replace(initialName, newName); } From 19324f100dbf1119c3bbcccda0b60a1c7c7c2585 Mon Sep 17 00:00:00 2001 From: Jakub Scholz Date: Sun, 8 Sep 2024 00:27:40 +0200 Subject: [PATCH 3/3] Fix metrics ConfigMap loading Signed-off-by: Jakub Scholz --- .../templates/crd/KafkaConnectTemplates.java | 4 +-- .../crd/KafkaMirrorMaker2Templates.java | 4 +-- .../templates/crd/KafkaTemplates.java | 4 +-- .../strimzi/systemtest/utils/FileUtils.java | 36 +++++++++++++++++++ 4 files changed, 42 insertions(+), 6 deletions(-) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java index 6185db83917..05dd73e446e 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaConnectTemplates.java @@ -18,7 +18,7 @@ import io.strimzi.api.kafka.model.kafka.KafkaResources; import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.TestConstants; -import io.strimzi.test.ReadWriteUtils; +import io.strimzi.systemtest.utils.FileUtils; import io.strimzi.test.k8s.KubeClusterResource; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -68,7 +68,7 @@ public static KafkaConnectBuilder kafkaConnectWithMetricsAndFileSinkPlugin( } public static ConfigMap connectMetricsConfigMap(String namespaceName, String kafkaConnectClusterName) { - return new 
ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_CONNECT_METRICS_CONFIG, ConfigMap.class)) + return new ConfigMapBuilder(FileUtils.extractConfigMapFromYAMLWithResources(TestConstants.PATH_TO_KAFKA_CONNECT_METRICS_CONFIG, "connect-metrics")) .editOrNewMetadata() .withNamespace(namespaceName) .withName(getConfigMapName(kafkaConnectClusterName)) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java index 4a313386e99..09abac26866 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaMirrorMaker2Templates.java @@ -16,8 +16,8 @@ import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.TestConstants; import io.strimzi.systemtest.storage.TestStorage; +import io.strimzi.systemtest.utils.FileUtils; import io.strimzi.systemtest.utils.kafkaUtils.KafkaUtils; -import io.strimzi.test.ReadWriteUtils; public class KafkaMirrorMaker2Templates { @@ -66,7 +66,7 @@ public static KafkaMirrorMaker2Builder kafkaMirrorMaker2WithMetrics( } public static ConfigMap mirrorMaker2MetricsConfigMap(String namespaceName, String kafkaMirrorMaker2Name) { - return new ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_MIRROR_MAKER_2_METRICS_CONFIG, ConfigMap.class)) + return new ConfigMapBuilder(FileUtils.extractConfigMapFromYAMLWithResources(TestConstants.PATH_TO_KAFKA_MIRROR_MAKER_2_METRICS_CONFIG, "mirror-maker-2-metrics")) .editOrNewMetadata() .withNamespace(namespaceName) .withName(getConfigMapName(kafkaMirrorMaker2Name)) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java index 75219c118b7..1647bca8dff 100644 --- 
a/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/templates/crd/KafkaTemplates.java @@ -18,8 +18,8 @@ import io.strimzi.operator.common.Annotations; import io.strimzi.systemtest.Environment; import io.strimzi.systemtest.TestConstants; +import io.strimzi.systemtest.utils.FileUtils; import io.strimzi.systemtest.utils.TestKafkaVersion; -import io.strimzi.test.ReadWriteUtils; import java.util.Collections; @@ -183,7 +183,7 @@ public static KafkaBuilder kafkaWithMetricsAndCruiseControlWithMetrics(String na public static ConfigMap kafkaMetricsConfigMap(String namespaceName, String kafkaClusterName) { String configMapName = kafkaClusterName + METRICS_KAFKA_CONFIG_MAP_SUFFIX; - return new ConfigMapBuilder(ReadWriteUtils.readObjectFromYamlFilepath(TestConstants.PATH_TO_KAFKA_METRICS_CONFIG, ConfigMap.class)) + return new ConfigMapBuilder(FileUtils.extractConfigMapFromYAMLWithResources(TestConstants.PATH_TO_KAFKA_METRICS_CONFIG, "kafka-metrics")) .editMetadata() .withName(configMapName) .withNamespace(namespaceName) diff --git a/systemtest/src/main/java/io/strimzi/systemtest/utils/FileUtils.java b/systemtest/src/main/java/io/strimzi/systemtest/utils/FileUtils.java index 2d28584de6a..e7de12049ce 100644 --- a/systemtest/src/main/java/io/strimzi/systemtest/utils/FileUtils.java +++ b/systemtest/src/main/java/io/strimzi/systemtest/utils/FileUtils.java @@ -4,7 +4,13 @@ */ package io.strimzi.systemtest.utils; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.exc.InvalidFormatException; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; +import io.fabric8.kubernetes.api.model.ConfigMap; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -19,6 
+25,8 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; +import java.util.List; +import java.util.Optional; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; @@ -116,4 +124,32 @@ public static File updateNamespaceOfYamlFile(String pathToOrigin, String namespa } return null; } + + /** + * Loads list of YAML documents from a file and extracts a ConfigMap with given name from it. + * + * @param yamlPath Path to the YAML file + * @param name Name of the ConfigMap that should be extracted + * + * @return Extracted ConfigMap + */ + public static ConfigMap extractConfigMapFromYAMLWithResources(String yamlPath, String name) { + try { + YAMLFactory yaml = new YAMLFactory(); + ObjectMapper mapper = new ObjectMapper(yaml); + YAMLParser yamlParser = yaml.createParser(new File(yamlPath)); + List<ConfigMap> list = mapper.readValues(yamlParser, new TypeReference<ConfigMap>() { }).readAll(); + Optional<ConfigMap> cmOpt = list.stream().filter(cm -> "ConfigMap".equals(cm.getKind()) && name.equals(cm.getMetadata().getName())).findFirst(); + if (cmOpt.isPresent()) { + return cmOpt.get(); + } else { + LOGGER.warn("ConfigMap {} not found in file {}", name, yamlPath); + return null; + } + } catch (InvalidFormatException e) { + throw new IllegalArgumentException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } }