From c56e82134a85ae3fd3dbd202c1fb311137c159dd Mon Sep 17 00:00:00 2001 From: Anatolii Popov Date: Sun, 11 Jun 2023 19:31:03 +0300 Subject: [PATCH] Migrating tests to AssertJ --- build.gradle | 2 +- .../connect/transforms/IntegrationTest.java | 6 +- .../transforms/TestSourceConnector.java | 2 +- .../converters/MoneyConverterTest.java | 39 +++--- .../transforms/ConcatFieldsConfigTest.java | 24 ++-- .../connect/transforms/ConcatFieldsTest.java | 50 ++++---- .../ExtractTimestampConfigTest.java | 33 +++-- .../transforms/ExtractTimestampTest.java | 72 ++++++----- .../transforms/ExtractTopicConfigTest.java | 14 +-- .../connect/transforms/ExtractTopicTest.java | 111 ++++++++-------- .../transforms/FilterByFieldValueTest.java | 119 +++++++++++------- .../connect/transforms/HashConfigTest.java | 26 ++-- .../kafka/connect/transforms/HashTest.java | 72 +++++------ .../TombstoneHandlerConfigTest.java | 32 ++--- .../transforms/TombstoneHandlerTest.java | 22 ++-- .../connect/transforms/utils/HexTest.java | 10 +- 16 files changed, 306 insertions(+), 328 deletions(-) diff --git a/build.gradle b/build.gradle index 29f1398..5e5ebe0 100644 --- a/build.gradle +++ b/build.gradle @@ -97,10 +97,10 @@ dependencies { implementation "org.slf4j:slf4j-api:1.7.36" testImplementation "org.junit.jupiter:junit-jupiter:5.9.3" - testImplementation "org.hamcrest:hamcrest:2.2" testImplementation "org.apache.kafka:connect-api:$kafkaVersion" testImplementation "org.testcontainers:junit-jupiter:$testcontainersVersion" testImplementation "io.debezium:debezium-api:$debeziumVersion" + testImplementation "org.assertj:assertj-core:3.24.2" testRuntimeOnly "org.apache.logging.log4j:log4j-slf4j-impl:2.20.0" testRuntimeOnly "org.apache.logging.log4j:log4j-api:2.20.0" diff --git a/src/integration-test/java/io/aiven/kafka/connect/transforms/IntegrationTest.java b/src/integration-test/java/io/aiven/kafka/connect/transforms/IntegrationTest.java index 1023e6d..c5b1edb 100644 --- a/src/integration-test/java/io/aiven/kafka/connect/transforms/IntegrationTest.java +++ b/src/integration-test/java/io/aiven/kafka/connect/transforms/IntegrationTest.java @@ -45,7 +45,7 @@ import org.testcontainers.junit.jupiter.Testcontainers; import org.testcontainers.utility.DockerImageName; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; @Testcontainers final class IntegrationTest { @@ -204,9 +204,9 @@ final void checkMessageTopics(final TopicPartition originalTopicPartition, final final Map endOffsets = consumer.endOffsets( Arrays.asList(originalTopicPartition, newTopicPartition)); // The original topic should be empty. - assertEquals(0, endOffsets.get(originalTopicPartition)); + assertThat(endOffsets.get(originalTopicPartition)).isZero(); // The new topic should be non-empty. - assertEquals(TestSourceConnector.MESSAGES_TO_PRODUCE, endOffsets.get(newTopicPartition)); + assertThat(endOffsets).containsEntry(newTopicPartition, TestSourceConnector.MESSAGES_TO_PRODUCE); } private void waitForCondition(final Supplier conditionChecker, diff --git a/src/integration-test/java/io/aiven/kafka/connect/transforms/TestSourceConnector.java b/src/integration-test/java/io/aiven/kafka/connect/transforms/TestSourceConnector.java index 19988bc..8e2e642 100644 --- a/src/integration-test/java/io/aiven/kafka/connect/transforms/TestSourceConnector.java +++ b/src/integration-test/java/io/aiven/kafka/connect/transforms/TestSourceConnector.java @@ -36,7 +36,7 @@ *

It just produces a fixed number of struct records. */ public class TestSourceConnector extends SourceConnector { - static final int MESSAGES_TO_PRODUCE = 10; + static final long MESSAGES_TO_PRODUCE = 10L; static final String ORIGINAL_TOPIC = "original-topic"; static final String NEW_TOPIC = "new-topic"; diff --git a/src/test/java/io/aiven/kafka/connect/debezium/converters/MoneyConverterTest.java b/src/test/java/io/aiven/kafka/connect/debezium/converters/MoneyConverterTest.java index 6ec8e8c..bd67917 100644 --- a/src/test/java/io/aiven/kafka/connect/debezium/converters/MoneyConverterTest.java +++ b/src/test/java/io/aiven/kafka/connect/debezium/converters/MoneyConverterTest.java @@ -30,9 +30,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; public class MoneyConverterTest { @@ -59,11 +58,11 @@ void teardown() { @Test void shouldRegisterCorrectSchema() { transform.configure(prop); - assertNull(registration.currFieldSchema); + assertThat(registration.currFieldSchema).isNull(); transform.converterFor(new MoneyTestRelationalColumn(), registration); - assertEquals(registration.currFieldSchema.schema().name(), "price"); - assertEquals(registration.currFieldSchema.schema().type(), Schema.Type.STRING); + assertThat(registration.currFieldSchema.schema().name()).isEqualTo("price"); + assertThat(registration.currFieldSchema.schema().type()).isEqualTo(Schema.Type.STRING); } @Test @@ -72,40 +71,40 @@ void shouldDoNothingIfColumnIsNotMoney() { transform.converterFor(new DummyRelationalColumn(), registration); - assertNull(registration.currFieldSchema); - assertNull(registration.currConverter); + assertThat(registration.currFieldSchema).isNull(); + assertThat(registration.currConverter).isNull(); } @Test void shouldFormatDataToMoneyFormat() { - assertNull(registration.currConverter); + assertThat(registration.currConverter).isNull(); transform.converterFor(new MoneyTestRelationalColumn(), registration); final String result = (String) registration.currConverter.convert(BigDecimal.valueOf(103.6999)); - assertEquals(result, "103.70"); + assertThat(result).isEqualTo("103.70"); final String result2 = (String) registration.currConverter.convert((long) 103); - assertEquals(result2, "103.00"); + assertThat(result2).isEqualTo("103.00"); } @Test void shouldFailIfDataIsNotBigDecimal() { - assertNull(registration.currConverter); + assertThat(registration.currConverter).isNull(); transform.converterFor(new MoneyTestRelationalColumn(), registration); - final Throwable e = assertThrows(IllegalArgumentException.class, - () -> registration.currConverter.convert("103.6999")); - assertEquals(e.getMessage(), "Money type should have BigDecimal type"); + assertThatThrownBy(() -> registration.currConverter.convert("103.6999")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Money type should have BigDecimal type"); } @Test void shouldFailIfDataIsMissing() { - assertNull(registration.currConverter); + assertThat(registration.currConverter).isNull(); transform.converterFor(new MoneyTestRelationalColumn(), registration); - final Throwable e = assertThrows(IllegalArgumentException.class, - () -> registration.currConverter.convert(null)); - assertEquals(e.getMessage(), "Money column 
is not optional, but data is null"); + assertThatThrownBy(() -> registration.currConverter.convert(null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Money column is not optional, but data is null"); } @Test @@ -117,7 +116,7 @@ void shouldDoNothingIfColumnIsOptional() { transform.converterFor(moneyColumn, registration); final String result = (String) registration.currConverter.convert(null); - assertNull(result); + assertThat(result).isNull(); } class StubConverterRegistration implements CustomConverter.ConverterRegistration { diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsConfigTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsConfigTest.java index 54dd9d0..9454527 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsConfigTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsConfigTest.java @@ -28,25 +28,25 @@ import static io.aiven.kafka.connect.transforms.ConcatFieldsConfig.FIELD_NAMES_CONFIG; import static io.aiven.kafka.connect.transforms.ConcatFieldsConfig.FIELD_REPLACE_MISSING_CONFIG; import static io.aiven.kafka.connect.transforms.ConcatFieldsConfig.OUTPUT_FIELD_NAME_CONFIG; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; class ConcatFieldsConfigTest { @Test void emptyConfig() { final Map props = new HashMap<>(); - final Throwable e = assertThrows(ConfigException.class, () -> new ConcatFieldsConfig(props)); - assertEquals("Missing required configuration \"field.names\" which has no default value.", - e.getMessage()); + assertThatThrownBy(() -> new ConcatFieldsConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Missing required configuration \"field.names\" which has no default value."); } @Test void emptyFieldName() { final Map props = new HashMap<>(); props.put(FIELD_NAMES_CONFIG, ""); - final Throwable e = assertThrows(ConfigException.class, () -> new ConcatFieldsConfig(props)); - assertEquals("Missing required configuration \"output.field.name\" which has no default value.", - e.getMessage()); + assertThatThrownBy(() -> new ConcatFieldsConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Missing required configuration \"output.field.name\" which has no default value."); } @Test @@ -57,9 +57,9 @@ void definedFieldName() { props.put(DELIMITER_CONFIG, "-"); props.put(FIELD_REPLACE_MISSING_CONFIG, "*"); final ConcatFieldsConfig config = new ConcatFieldsConfig(props); - assertEquals(Arrays.asList("test", "foo", "bar"), config.fieldNames()); - assertEquals("combined", config.outputFieldName()); - assertEquals("-", config.delimiter()); - assertEquals("*", config.fieldReplaceMissing()); + assertThat(config.fieldNames()).isEqualTo(Arrays.asList("test", "foo", "bar")); + assertThat(config.outputFieldName()).isEqualTo("combined"); + assertThat(config.delimiter()).isEqualTo("-"); + assertThat(config.fieldReplaceMissing()).isEqualTo("*"); } } diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsTest.java index 04179a1..cecc6cd 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ConcatFieldsTest.java @@ -33,9 +33,9 @@ import static 
io.aiven.kafka.connect.transforms.ConcatFieldsConfig.FIELD_NAMES_CONFIG; import static io.aiven.kafka.connect.transforms.ConcatFieldsConfig.FIELD_REPLACE_MISSING_CONFIG; import static io.aiven.kafka.connect.transforms.ConcatFieldsConfig.OUTPUT_FIELD_NAME_CONFIG; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThatNoException; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; abstract class ConcatFieldsTest { private static final String FIELD = "combined"; @@ -65,30 +65,27 @@ abstract class ConcatFieldsTest { @Test void recordNotStructOrMap() { - final SinkRecord originalRecord = record(SchemaBuilder.INT8_SCHEMA, (byte) 123); - final Throwable e = assertThrows(DataException.class, - () -> transformation().apply(originalRecord)); - assertEquals("Value type must be STRUCT or MAP: " + originalRecord, - e.getMessage()); + final SinkRecord originalRecord = record(Schema.INT8_SCHEMA, (byte) 123); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage("Value type must be STRUCT or MAP: " + originalRecord); } @Test void recordStructNull() { final Schema schema = SchemaBuilder.struct().schema(); final SinkRecord originalRecord = record(schema, null); - final Throwable e = assertThrows(DataException.class, - () -> transformation().apply(originalRecord)); - assertEquals(dataPlace() + " Value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " Value can't be null: " + originalRecord); } @Test void recordMapNull() { final SinkRecord originalRecord = record(null, null); - final Throwable e = assertThrows(DataException.class, - () -> transformation().apply(originalRecord)); - assertEquals(dataPlace() + " Value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " Value can't be null: " + originalRecord); } @Test @@ -100,9 +97,9 @@ void structWithSchemaMissingField() { .field(AGE_FIELD, Schema.OPTIONAL_INT64_SCHEMA) .build(); final SinkRecord originalRecord = record(schema, new Struct(schema)); - final Throwable e = assertThrows(DataException.class, - () -> transformation().apply(originalRecord)); - assertEquals("Invalid value: null used for required field: \"bar\", schema type: STRING", e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage("Invalid value: null used for required field: \"bar\", schema type: STRING"); } @Test @@ -114,26 +111,27 @@ void structWithMissingField() { .field(AGE_FIELD, Schema.OPTIONAL_INT64_SCHEMA) .build(); final SinkRecord originalRecord = record(null, new Struct(schema)); - final Throwable e = assertThrows(DataException.class, - () -> transformation().apply(originalRecord)); - assertEquals("Invalid value: null used for required field: \"bar\", schema type: STRING", e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage("Invalid value: null used for required field: \"bar\", schema type: STRING"); } @Test void mapWithMissingField() { final
SinkRecord originalRecord = record(null, new HashMap<>()); - assertDoesNotThrow(() -> transformation().apply(originalRecord), - FIELD + " field must be present and its value can't be null: " + originalRecord); + assertThatNoException() + .describedAs(FIELD + " field must be present and its value can't be null: " + originalRecord) + .isThrownBy(() -> transformation().apply(originalRecord)); } @Test void mapWithoutSchema() { - final HashMap valueMap = new HashMap<>(); + final Map valueMap = new HashMap<>(); valueMap.put(BAR_FIELD, BAR_VALUE); valueMap.put(TEST_FIELD, TEST_VALUE); valueMap.put(AGE_FIELD, AGE_VALUE); valueMap.put(FOO_FIELD, FOO_VALUE); - final HashMap newValueMap = new HashMap<>(); + final Map newValueMap = new HashMap<>(); newValueMap.put(BAR_FIELD, BAR_VALUE); newValueMap.put(FIELD, TEST_VALUE + "-" + FOO_VALUE + "-" + BAR_VALUE + "-" + AGE_VALUE); newValueMap.put(TEST_FIELD, TEST_VALUE); diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampConfigTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampConfigTest.java index 08bd22f..8c93a75 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampConfigTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampConfigTest.java @@ -23,25 +23,25 @@ import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; class ExtractTimestampConfigTest { @Test void emptyConfig() { final Map props = new HashMap<>(); - final Throwable e = assertThrows(ConfigException.class, () -> new ExtractTimestampConfig(props)); - assertEquals("Missing required configuration \"field.name\" which has no default value.", - e.getMessage()); + assertThatThrownBy(() -> new ExtractTimestampConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Missing required configuration \"field.name\" which has no default value."); } @Test void emptyFieldName() { final Map props = new HashMap<>(); props.put("field.name", ""); - final Throwable e = assertThrows(ConfigException.class, () -> new ExtractTimestampConfig(props)); - assertEquals("Invalid value for configuration field.name: String must be non-empty", - e.getMessage()); + assertThatThrownBy(() -> new ExtractTimestampConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Invalid value for configuration field.name: String must be non-empty"); } @Test @@ -49,7 +49,7 @@ void definedFieldName() { final Map props = new HashMap<>(); props.put("field.name", "test"); final ExtractTimestampConfig config = new ExtractTimestampConfig(props); - assertEquals("test", config.fieldName()); + assertThat(config.fieldName()).isEqualTo("test"); } @Test @@ -57,7 +57,7 @@ void emptyTimestampResolution() { final var props = new HashMap<>(); props.put("field.name", "test"); final var config = new ExtractTimestampConfig(props); - assertEquals(ExtractTimestampConfig.TimestampResolution.MILLISECONDS, config.timestampResolution()); + assertThat(config.timestampResolution()).isEqualTo(ExtractTimestampConfig.TimestampResolution.MILLISECONDS); } @Test @@ -69,7 +69,7 @@ void definedTimestampResolutionInSeconds() { ExtractTimestampConfig.TimestampResolution.SECONDS.resolution ); final var config = new ExtractTimestampConfig(props); - assertEquals(ExtractTimestampConfig.TimestampResolution.SECONDS, 
config.timestampResolution()); + assertThat(config.timestampResolution()).isEqualTo(ExtractTimestampConfig.TimestampResolution.SECONDS); } @Test @@ -81,7 +81,7 @@ void definedTimestampResolutionInMillis() { ExtractTimestampConfig.TimestampResolution.MILLISECONDS.resolution ); final var config = new ExtractTimestampConfig(props); - assertEquals(ExtractTimestampConfig.TimestampResolution.MILLISECONDS, config.timestampResolution()); + assertThat(config.timestampResolution()).isEqualTo(ExtractTimestampConfig.TimestampResolution.MILLISECONDS); } @Test @@ -92,11 +92,10 @@ void wrongTimestampResolution() { ExtractTimestampConfig.EPOCH_RESOLUTION_CONFIG, "foo" ); - final var e = assertThrows(ConfigException.class, () -> new ExtractTimestampConfig(props)); - assertEquals( - "Invalid value foo for configuration timestamp.resolution: " - + "Unsupported resolution type 'foo'. Supported are: milliseconds, seconds", - e.getMessage()); + assertThatThrownBy(() -> new ExtractTimestampConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Invalid value foo for configuration timestamp.resolution: " + + "Unsupported resolution type 'foo'. Supported are: milliseconds, seconds"); } } diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampTest.java index 4903b54..967f991 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTimestampTest.java @@ -35,8 +35,8 @@ import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.ValueSource; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; abstract class ExtractTimestampTest { private static final String FIELD = "test_field"; @@ -44,26 +44,26 @@ abstract class ExtractTimestampTest { @Test void recordNotStructOrMap() { final SinkRecord originalRecord = record(SchemaBuilder.INT8_SCHEMA, (byte) 123); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(keyOrValue() + " type must be STRUCT or MAP: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(keyOrValue() + " type must be STRUCT or MAP: " + originalRecord); } @Test void recordStructNull() { final Schema schema = SchemaBuilder.struct().schema(); final SinkRecord originalRecord = record(schema, null); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(keyOrValue() + " can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(keyOrValue() + " can't be null: " + originalRecord); } @Test void recordMapNull() { final SinkRecord originalRecord = record(null, null); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(keyOrValue() + " can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(keyOrValue() + " can't be null: " + originalRecord); } @Test @@ -72,17 +72,17 @@ void 
structWithMissingField() { .field(FIELD, Schema.INT64_SCHEMA) .build(); final SinkRecord originalRecord = record(null, new Struct(schema)); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be present and its value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be present and its value can't be null: " + originalRecord); } @Test void mapWithMissingField() { final SinkRecord originalRecord = record(null, new HashMap<>()); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be present and its value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be present and its value can't be null: " + originalRecord); } @Test @@ -91,9 +91,9 @@ void structWithNullField() { .field(FIELD, Schema.OPTIONAL_INT64_SCHEMA) .build(); final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, null)); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be present and its value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be present and its value can't be null: " + originalRecord); } @Test @@ -101,9 +101,9 @@ void mapWithNullField() { final HashMap valueMap = new HashMap<>(); valueMap.put(FIELD, null); final SinkRecord originalRecord = record(null, valueMap); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be present and its value can't be null: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be present and its value can't be null: " + originalRecord); } @Test @@ -112,10 +112,9 @@ void structWithFieldOfIncorrectType() { .field(FIELD, Schema.STRING_SCHEMA) .build(); final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, "aaa")); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be INT64 or org.apache.kafka.connect.data.Timestamp: " - + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be INT64 or org.apache.kafka.connect.data.Timestamp: " + originalRecord); } @Test @@ -123,10 +122,9 @@ void mapWithFieldOfIncorrectType() { final HashMap valueMap = new HashMap<>(); valueMap.put(FIELD, "aaa"); final SinkRecord originalRecord = record(null, valueMap); - final Throwable e = assertThrows(DataException.class, () -> transformation().apply(originalRecord)); - assertEquals(FIELD + " field must be INT64 or org.apache.kafka.connect.data.Timestamp: " - + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation().apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " field must be INT64 or org.apache.kafka.connect.data.Timestamp: 
" + originalRecord); } @ParameterizedTest @@ -145,7 +143,7 @@ void structWithOptionalIntField(final boolean optional) { final long timestamp = 11363151277L; final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, timestamp)); final SinkRecord transformedRecord = transformation().apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, timestamp), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, timestamp)); } @ParameterizedTest @@ -164,7 +162,7 @@ void structWithOptIntField(final boolean optional) { final long timestamp = 11363151277L; final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, timestamp)); final SinkRecord transformedRecord = transformation().apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, timestamp), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, timestamp)); } @ParameterizedTest @@ -186,7 +184,7 @@ void structWithIntField(final ExtractTimestampConfig.TimestampResolution tsResol props.put(ExtractTimestampConfig.EPOCH_RESOLUTION_CONFIG, tsResolution.resolution()); final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, timestamp)); final SinkRecord transformedRecord = transformation(props).apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, instance.toEpochMilli()), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, instance.toEpochMilli())); } @ParameterizedTest @@ -207,7 +205,7 @@ void mapWithIntField(final ExtractTimestampConfig.TimestampResolution tsResoluti } final SinkRecord originalRecord = record(null, Map.of(FIELD, timestamp)); final var transformedRecord = transformation(props).apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, instance.toEpochMilli()), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, instance.toEpochMilli())); } @ParameterizedTest @@ -225,7 +223,7 @@ void structWithTimestampField(final ExtractTimestampConfig.TimestampResolution t props.put(ExtractTimestampConfig.EPOCH_RESOLUTION_CONFIG, tsResolution.resolution()); final SinkRecord originalRecord = record(null, new Struct(schema).put(FIELD, Date.from(instant))); final SinkRecord transformedRecord = transformation(props).apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, instant.toEpochMilli()), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, instant.toEpochMilli())); } @ParameterizedTest @@ -240,7 +238,7 @@ void mapWithTimestampField(final ExtractTimestampConfig.TimestampResolution tsRe props.put(ExtractTimestampConfig.EPOCH_RESOLUTION_CONFIG, tsResolution.resolution()); final SinkRecord originalRecord = record(null, Map.of(FIELD, Date.from(instant))); final SinkRecord transformedRecord = transformation(props).apply(originalRecord); - assertEquals(setNewTimestamp(originalRecord, instant.toEpochMilli()), transformedRecord); + assertThat(transformedRecord).isEqualTo(setNewTimestamp(originalRecord, instant.toEpochMilli())); } private ExtractTimestamp transformation() { diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicConfigTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicConfigTest.java index df1d1b3..66093d9 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicConfigTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicConfigTest.java @@ 
-18,22 +18,20 @@ import java.util.HashMap; import java.util.Map; -import java.util.Optional; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.assertj.core.api.Assertions.assertThat; class ExtractTopicConfigTest { @Test void defaults() { final Map props = new HashMap<>(); final ExtractTopicConfig config = new ExtractTopicConfig(props); - assertEquals(Optional.empty(), config.fieldName()); - assertFalse(config.skipMissingOrNull()); + assertThat(config.fieldName()).isNotPresent(); + assertThat(config.skipMissingOrNull()).isFalse(); } @ParameterizedTest @@ -42,7 +40,7 @@ void skipMissingOrNull(final boolean skipMissingOrNull) { final Map props = new HashMap<>(); props.put("skip.missing.or.null", Boolean.toString(skipMissingOrNull)); final ExtractTopicConfig config = new ExtractTopicConfig(props); - assertEquals(skipMissingOrNull, config.skipMissingOrNull()); + assertThat(config.skipMissingOrNull()).isEqualTo(skipMissingOrNull); } @Test @@ -50,7 +48,7 @@ void emptyFieldName() { final Map props = new HashMap<>(); props.put("field.name", ""); final ExtractTopicConfig config = new ExtractTopicConfig(props); - assertEquals(Optional.empty(), config.fieldName()); + assertThat(config.fieldName()).isNotPresent(); } @Test @@ -58,6 +56,6 @@ void definedFieldName() { final Map props = new HashMap<>(); props.put("field.name", "test"); final ExtractTopicConfig config = new ExtractTopicConfig(props); - assertEquals(Optional.of("test"), config.fieldName()); + assertThat(config.fieldName()).hasValue("test"); } } diff --git a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicTest.java b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicTest.java index 1e4b2ba..c1cbd50 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/ExtractTopicTest.java @@ -31,8 +31,8 @@ import org.junit.jupiter.params.provider.NullAndEmptySource; import org.junit.jupiter.params.provider.ValueSource; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; abstract class ExtractTopicTest { @@ -43,9 +43,9 @@ abstract class ExtractTopicTest { @ValueSource(booleans = { true, false }) void nullSchema(final boolean skipMissingOrNull) { final SinkRecord originalRecord = record(null, null); - final Throwable e = assertThrows(DataException.class, - () -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)); - assertEquals(dataPlace() + " schema can't be null: " + originalRecord, e.getMessage()); + assertThatThrownBy(() -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " schema can't be null: " + originalRecord); } @ParameterizedTest @@ -53,82 +53,75 @@ void nullSchema(final boolean skipMissingOrNull) { void noFieldName_UnsupportedType(final boolean skipMissingOrNull) { final Schema schema = SchemaBuilder.struct().build(); final SinkRecord originalRecord = record(schema, new Struct(schema)); - final Throwable e = assertThrows(DataException.class, - () -> transformation(null, skipMissingOrNull).apply(originalRecord)); - 
assertEquals(dataPlace() - + " schema type must be " - + "[INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING]" - + " if field name is not specified: " - + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(null, skipMissingOrNull).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " schema type must be " + + "[INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING] " + + "if field name is not specified: " + originalRecord); } @ParameterizedTest @NullAndEmptySource void noFieldName_NullOrEmptyValue_NoSkip(final String value) { - final Schema schema = SchemaBuilder.STRING_SCHEMA; + final Schema schema = Schema.STRING_SCHEMA; final SinkRecord originalRecord = record(schema, value); - final Throwable e = assertThrows(DataException.class, - () -> transformation(null, false).apply(originalRecord)); - assertEquals(dataPlace() + " can't be null or empty: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(null, false).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " can't be null or empty: " + originalRecord); } @ParameterizedTest @NullAndEmptySource void noFieldName_NullOrEmptyValue_Skip(final String value) { - final Schema schema = SchemaBuilder.STRING_SCHEMA; + final Schema schema = Schema.STRING_SCHEMA; final SinkRecord originalRecord = record(schema, value); final SinkRecord result = transformation(null, true).apply(originalRecord); - assertEquals(originalRecord, result); + assertThat(result).isEqualTo(originalRecord); } @Test void noFieldName_NormalInt64Value() { - final Schema schema = SchemaBuilder.INT64_SCHEMA; + final Schema schema = Schema.INT64_SCHEMA; final SinkRecord originalRecord = record(schema, 123L); final SinkRecord result = transformation(null, false).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, "123"), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, "123")); } @Test void noFieldName_NormalBooleanValue() { - final Schema schema = SchemaBuilder.BOOLEAN_SCHEMA; + final Schema schema = Schema.BOOLEAN_SCHEMA; final SinkRecord originalRecord = record(schema, false); final SinkRecord result = transformation(null, false).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, "false"), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, "false")); } @Test void noFieldName_NormalStringValue() { - final Schema schema = SchemaBuilder.STRING_SCHEMA; + final Schema schema = Schema.STRING_SCHEMA; final SinkRecord originalRecord = record(schema, NEW_TOPIC); final SinkRecord result = transformation(null, false).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, NEW_TOPIC), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, NEW_TOPIC)); } @ParameterizedTest @ValueSource(booleans = { true, false }) void fieldName_NonStruct(final boolean skipMissingOrNull) { - final SinkRecord originalRecord = record(SchemaBuilder.INT8_SCHEMA, "some"); - final Throwable e = assertThrows(DataException.class, - () -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)); - assertEquals(dataPlace() + " schema type must be STRUCT if field name is specified: " - + originalRecord, - e.getMessage()); + final SinkRecord originalRecord = record(Schema.INT8_SCHEMA, "some"); + assertThatThrownBy(() -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " schema type must be STRUCT if field 
name is specified: " + originalRecord); } @ParameterizedTest @ValueSource(booleans = { true, false }) void fieldName_NullStruct(final boolean skipMissingOrNull) { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.STRING_SCHEMA) + .field(FIELD, Schema.STRING_SCHEMA) .schema(); final SinkRecord originalRecord = record(schema, null); - final Throwable e = assertThrows(DataException.class, - () -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)); - assertEquals(dataPlace() + " can't be null if field name is specified: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(dataPlace() + " can't be null if field name is specified: " + originalRecord); } @ParameterizedTest @@ -140,12 +133,10 @@ void fieldName_UnsupportedTypeInField(final boolean skipMissingOrNull) { .schema(); final SinkRecord originalRecord = record( schema, new Struct(schema).put(FIELD, new Struct(innerSchema))); - final Throwable e = assertThrows(DataException.class, - () -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)); - assertEquals(FIELD + " schema type in " + dataPlace() + " must be " - + "[INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING]" - + ": " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(FIELD, skipMissingOrNull).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " schema type in " + dataPlace() + + " must be [INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING]: " + originalRecord); } @ParameterizedTest @@ -153,17 +144,16 @@ void fieldName_UnsupportedTypeInField(final boolean skipMissingOrNull) { @NullAndEmptySource void fieldName_NullOrEmptyValue_NoSkip(final String value) { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.OPTIONAL_STRING_SCHEMA) + .field(FIELD, Schema.OPTIONAL_STRING_SCHEMA) .schema(); final Struct struct = new Struct(schema); if (!"missing".equals(value)) { struct.put(FIELD, value); } final SinkRecord originalRecord = record(schema, struct); - final Throwable e = assertThrows(DataException.class, - () -> transformation(FIELD, false).apply(originalRecord)); - assertEquals(FIELD + " in " + dataPlace() + " can't be null or empty: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(FIELD, false).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " in " + dataPlace() + " can't be null or empty: " + originalRecord); } @ParameterizedTest @@ -171,7 +161,7 @@ void fieldName_NullOrEmptyValue_NoSkip(final String value) { @NullAndEmptySource void fieldName_NullOrEmptyValueOrMissingField_Skip(final String value) { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.OPTIONAL_STRING_SCHEMA) + .field(FIELD, Schema.OPTIONAL_STRING_SCHEMA) .schema(); final Struct struct = new Struct(schema); if (!"missing".equals(value)) { @@ -179,17 +169,16 @@ void fieldName_NullOrEmptyValueOrMissingField_Skip(final String value) { } final SinkRecord originalRecord = record(schema, struct); final SinkRecord result = transformation(FIELD, true).apply(originalRecord); - assertEquals(originalRecord, result); + assertThat(result).isEqualTo(originalRecord); } @Test void fieldName_MissingFieldInSchema_NoSkip() { final Schema schema = SchemaBuilder.struct().schema(); final SinkRecord originalRecord = record(schema, new Struct(schema)); - final Throwable e = 
assertThrows(DataException.class, - () -> transformation(FIELD, false).apply(originalRecord)); - assertEquals(FIELD + " in " + dataPlace() + " schema can't be missing: " + originalRecord, - e.getMessage()); + assertThatThrownBy(() -> transformation(FIELD, false).apply(originalRecord)) + .isInstanceOf(DataException.class) + .hasMessage(FIELD + " in " + dataPlace() + " schema can't be missing: " + originalRecord); } @Test @@ -197,37 +186,37 @@ void fieldName_MissingFieldInSchema_Skip() { final Schema schema = SchemaBuilder.struct().schema(); final SinkRecord originalRecord = record(schema, new Struct(schema)); final SinkRecord result = transformation(FIELD, true).apply(originalRecord); - assertEquals(originalRecord, result); + assertThat(result).isEqualTo(originalRecord); } @Test void fieldName_NormalIntValue() { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.INT64_SCHEMA) + .field(FIELD, Schema.INT64_SCHEMA) .schema(); final SinkRecord originalRecord = record(schema, new Struct(schema).put(FIELD, 123L)); final SinkRecord result = transformation(FIELD, true).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, "123"), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, "123")); } @Test void fieldName_NormalBooleanValue() { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.BOOLEAN_SCHEMA) + .field(FIELD, Schema.BOOLEAN_SCHEMA) .schema(); final SinkRecord originalRecord = record(schema, new Struct(schema).put(FIELD, false)); final SinkRecord result = transformation(FIELD, true).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, "false"), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, "false")); } @Test void fieldName_NormalStringValue() { final Schema schema = SchemaBuilder.struct() - .field(FIELD, SchemaBuilder.STRING_SCHEMA) + .field(FIELD, Schema.STRING_SCHEMA) .schema(); final SinkRecord originalRecord = record(schema, new Struct(schema).put(FIELD, NEW_TOPIC)); final SinkRecord result = transformation(FIELD, true).apply(originalRecord); - assertEquals(setNewTopic(originalRecord, NEW_TOPIC), result); + assertThat(result).isEqualTo(setNewTopic(originalRecord, NEW_TOPIC)); } private ExtractTopic transformation(final String fieldName, final boolean skipMissingOrNull) { diff --git a/src/test/java/io/aiven/kafka/connect/transforms/FilterByFieldValueTest.java b/src/test/java/io/aiven/kafka/connect/transforms/FilterByFieldValueTest.java index 97a9ab0..a83fe03 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/FilterByFieldValueTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/FilterByFieldValueTest.java @@ -28,8 +28,7 @@ import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNull; +import static org.assertj.core.api.Assertions.assertThat; class FilterByFieldValueTest { @@ -43,21 +42,22 @@ void shouldFilterOutValueRecordsEqualsToReadEvents() { "field.value.matches", "false" )); - assertNull( - filter.apply( - prepareStructRecord( - struct -> { - }, - struct -> struct.put("op", "r") - )), - "Record with op 'r' should be filtered out"); + assertThat(filter.apply( + prepareStructRecord( + struct -> { + }, + struct -> struct.put("op", "r") + ))) + .as("Record with op 'r' should be filtered out") + .isNull(); final SourceRecord record = prepareStructRecord( struct -> { }, struct -> struct.put("op", "u") ); - assertEquals(record, filter.apply(record), - "Record with op 
not 'r' should be not filtered out"); + assertThat(filter.apply(record)) + .as("Record with op not 'r' should be not filtered out") + .isEqualTo(record); } @Test @@ -69,15 +69,19 @@ void shouldFilterOutKeyRecordsEqualsToId() { "field.value.matches", "false" )); - assertNull(filter.apply(prepareStructRecord( + assertThat(filter.apply(prepareStructRecord( struct -> struct.put("id", "A123"), struct -> { - })), "Record with id 'A132' should be filtered out"); + }))) + .as("Record with id 'A132' should be filtered out") + .isNull(); final SourceRecord record = prepareStructRecord( struct -> struct.put("id", "A111"), struct -> { }); - assertEquals(record, filter.apply(record), "Record with id not 'A132' should not be filtered out"); + assertThat(filter.apply(record)) + .as("Record with id not 'A132' should not be filtered out") + .isEqualTo(record); } @Test @@ -89,20 +93,22 @@ void shouldFilterOutValueRecordsNotEqualsReadEvents() { "field.value.matches", "true" )); - assertNull( - filter.apply( - prepareStructRecord( - struct -> { - }, - struct -> struct.put("op", "u") - )), - "Record with op not equal to 'r' should be filtered out"); + assertThat(filter.apply( + prepareStructRecord( + struct -> { + }, + struct -> struct.put("op", "u") + ))) + .as("Record with op not equal to 'r' should be filtered out") + .isNull(); final SourceRecord record = prepareStructRecord( struct -> { }, struct -> struct.put("op", "r") ); - assertEquals(record, filter.apply(record), "Record with op equal to 'r' should not be filtered out"); + assertThat(filter.apply(record)) + .as("Record with op equal to 'r' should not be filtered out") + .isEqualTo(record); } @Test @@ -114,20 +120,22 @@ void shouldFilterOutKeyRecordsNotEqualsToId() { "field.value.matches", "true" )); - assertNull( - filter.apply( - prepareStructRecord( - struct -> struct.put("id", "111"), - struct -> { - } - )), - "Record with id not equal to 'A132' should be filtered out"); + assertThat(filter.apply( + prepareStructRecord( + struct -> struct.put("id", "111"), + struct -> { + } + ))) + .as("Record with id not equal to 'A132' should be filtered out") + .isNull(); final SourceRecord record = prepareStructRecord( struct -> struct.put("id", "A123"), struct -> { } ); - assertEquals(record, filter.apply(record), "Record with id equal to 'A132' should not be filtered out"); + assertThat(filter.apply(record)) + .as("Record with id equal to 'A132' should not be filtered out") + .isEqualTo(record); } @Test @@ -139,10 +147,13 @@ void shouldFilterOutMapValueRecordsWithRegex() { configs.put("field.value.matches", "false"); filterByFieldValue.configure(configs); - assertNull(filterByFieldValue.apply(prepareRecord(() -> "A42", () -> Map.of("language", "Javascript"))), - "The record should be filtered out"); + assertThat(filterByFieldValue.apply(prepareRecord(() -> "A42", () -> Map.of("language", "Javascript")))) + .as("The record should be filtered out") + .isNull(); final SourceRecord record = prepareRecord(() -> "A42", () -> Map.of("language", "Rust")); - assertEquals(record, filterByFieldValue.apply(record), "The record should not be filtered out"); + assertThat(filterByFieldValue.apply(record)) + .as("The record should not be filtered out") + .isEqualTo(record); } @Test @@ -154,10 +165,13 @@ void shouldFilterOutMapKeyRecordsWithRegex() { configs.put("field.value.matches", "false"); filterByFieldValue.configure(configs); - assertNull(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> "A42")), - "The record should be filtered 
out"); + assertThat(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> "A42"))) + .as("The record should be filtered out") + .isNull(); final SourceRecord record = prepareRecord(() -> Map.of("language", "Rust"), () -> "A42"); - assertEquals(record, filterByFieldValue.apply(record), "The record should not be filtered out"); + assertThat(filterByFieldValue.apply(record)) + .as("The record should not be filtered out") + .isEqualTo(record); } @Test @@ -168,10 +182,13 @@ void shouldFilterOutRawKeyRecords() { configs.put("field.value.matches", "false"); filterByFieldValue.configure(configs); - assertNull(filterByFieldValue.apply(prepareRecord(() -> "A42", () -> Map.of("language", "Javascript"))), - "The record should be filtered out"); + assertThat(filterByFieldValue.apply(prepareRecord(() -> "A42", () -> Map.of("language", "Javascript")))) + .as("The record should be filtered out") + .isNull(); final SourceRecord record = prepareRecord(() -> "43", () -> Map.of("language", "Rust")); - assertEquals(record, filterByFieldValue.apply(record), "The record should be filtered out"); + assertThat(filterByFieldValue.apply(record)) + .as("The record should be filtered out") + .isEqualTo(record); } @Test @@ -182,10 +199,13 @@ void shouldFilterOutRawValueRecords() { configs.put("field.value.matches", "false"); filterByFieldValue.configure(configs); - assertNull(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> "A42")), - "The record should be filtered out"); + assertThat(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> "A42"))) + .as("The record should be filtered out") + .isNull(); final SourceRecord record = prepareRecord(() -> Map.of("language", "Rust"), () -> "43"); - assertEquals(record, filterByFieldValue.apply(record), "The record should be filtered out"); + assertThat(filterByFieldValue.apply(record)) + .as("The record should be filtered out") + .isEqualTo(record); } @Test @@ -196,10 +216,13 @@ void shouldFilterOutRawNumericValueRecords() { configs.put("field.value.matches", "false"); filterByFieldValue.configure(configs); - assertNull(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> (byte) 42)), - "The record should be filtered out"); + assertThat(filterByFieldValue.apply(prepareRecord(() -> Map.of("language", "Javascript"), () -> (byte) 42))) + .as("The record should be filtered out") + .isNull(); final SourceRecord record = prepareRecord(() -> Map.of("language", "Rust"), () -> (byte) 43); - assertEquals(record, filterByFieldValue.apply(record), "The record should be filtered out"); + assertThat(filterByFieldValue.apply(record)) + .as("The record should be filtered out") + .isEqualTo(record); } private SourceRecord prepareRecord( diff --git a/src/test/java/io/aiven/kafka/connect/transforms/HashConfigTest.java b/src/test/java/io/aiven/kafka/connect/transforms/HashConfigTest.java index ab0cc36..acde075 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/HashConfigTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/HashConfigTest.java @@ -18,7 +18,6 @@ import java.util.HashMap; import java.util.Map; -import java.util.Optional; import org.apache.kafka.common.config.ConfigException; @@ -26,17 +25,16 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static 
org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; class HashConfigTest { @Test void defaults() { final Map props = new HashMap<>(); - final Throwable e = assertThrows(ConfigException.class, - () -> new HashConfig(props)); - assertEquals("Missing required configuration \"function\" which has no default value.", - e.getMessage()); + assertThatThrownBy(() -> new HashConfig(props)) + .isInstanceOf(ConfigException.class) + .hasMessage("Missing required configuration \"function\" which has no default value."); } @ParameterizedTest @@ -46,7 +44,7 @@ void skipMissingOrNull(final boolean skipMissingOrNull) { props.put("skip.missing.or.null", Boolean.toString(skipMissingOrNull)); props.put("function", "sha256"); final HashConfig config = new HashConfig(props); - assertEquals(skipMissingOrNull, config.skipMissingOrNull()); + assertThat(config.skipMissingOrNull()).isEqualTo(skipMissingOrNull); } @Test @@ -54,7 +52,7 @@ void hashFunctionMd5() { final Map props = new HashMap<>(); props.put("function", "md5"); final HashConfig config = new HashConfig(props); - assertEquals(HashConfig.HashFunction.MD5, config.hashFunction()); + assertThat(config.hashFunction()).isEqualTo(HashConfig.HashFunction.MD5); } @Test @@ -62,7 +60,7 @@ void hashFunctionSha1() { final Map props = new HashMap<>(); props.put("function", "sha1"); final HashConfig config = new HashConfig(props); - assertEquals(HashConfig.HashFunction.SHA1, config.hashFunction()); + assertThat(config.hashFunction()).isEqualTo(HashConfig.HashFunction.SHA1); } @Test @@ -70,7 +68,7 @@ void hashFunctionSha256() { final Map props = new HashMap<>(); props.put("function", "sha256"); final HashConfig config = new HashConfig(props); - assertEquals(HashConfig.HashFunction.SHA256, config.hashFunction()); + assertThat(config.hashFunction()).isEqualTo(HashConfig.HashFunction.SHA256); } @Test @@ -79,7 +77,7 @@ void emptyFieldName() { props.put("field.name", ""); props.put("function", "sha256"); final HashConfig config = new HashConfig(props); - assertEquals(Optional.empty(), config.fieldName()); + assertThat(config.fieldName()).isNotPresent(); } @Test @@ -88,7 +86,7 @@ void definedFieldName() { props.put("field.name", "test"); props.put("function", "sha256"); final HashConfig config = new HashConfig(props); - assertEquals(Optional.of("test"), config.fieldName()); - assertEquals(HashConfig.HashFunction.SHA256, config.hashFunction()); + assertThat(config.fieldName()).hasValue("test"); + assertThat(config.hashFunction()).isEqualTo(HashConfig.HashFunction.SHA256); } } diff --git a/src/test/java/io/aiven/kafka/connect/transforms/HashTest.java b/src/test/java/io/aiven/kafka/connect/transforms/HashTest.java index 810afdd..d7e274a 100644 --- a/src/test/java/io/aiven/kafka/connect/transforms/HashTest.java +++ b/src/test/java/io/aiven/kafka/connect/transforms/HashTest.java @@ -30,7 +30,8 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.jupiter.api.Assertions.assertThrows; abstract class HashTest { @@ -71,10 +72,9 @@ abstract class HashTest { void noFieldName_NullValue_NoSkip() { final Schema schema = SchemaBuilder.STRING_SCHEMA; final SinkRecord originalRecord = record(schema, null); - final Throwable e = assertThrows(DataException.class, - () -> 
transformation(null, false, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(dataPlace() + " can't be null: " + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(null, false, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(dataPlace() + " can't be null: " + originalRecord);
     }
 
     @Test
@@ -84,7 +84,7 @@ void noFieldName_NullValue_Skip() {
         final Hash transform = transformation(null, true, DEFAULT_HASH_FUNCTION);
         final SinkRecord result = transform.apply(originalRecord);
         // No changes.
-        assertEquals(originalRecord, result);
+        assertThat(result).isEqualTo(originalRecord);
     }
 
     @Test
@@ -92,7 +92,7 @@ void nullSchema() {
         final SinkRecord originalRecord = record(null, null);
         final Throwable e = assertThrows(DataException.class,
             () -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(dataPlace() + " schema can't be null: " + originalRecord, e.getMessage());
+        assertThat(e.getMessage()).isEqualTo(dataPlace() + " schema can't be null: " + originalRecord);
     }
 
     @Test
@@ -101,10 +101,9 @@ void noFieldName_UnsupportedType() {
         final SinkRecord originalRecord = record(schema, new Struct(schema));
         final Throwable e = assertThrows(DataException.class,
             () -> transformation(null, true, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(dataPlace()
-                + " schema type must be STRING if field name is not specified: "
-                + originalRecord,
-            e.getMessage());
+        assertThat(e.getMessage()).isEqualTo(dataPlace()
+            + " schema type must be STRING if field name is not specified: "
+            + originalRecord);
     }
 
     @ParameterizedTest
@@ -115,7 +114,7 @@ void noFieldName_NormalStringValue(final String hashFunction) {
         final Hash transform = transformation(null, false, hashFunction);
         final SinkRecord result = transform.apply(originalRecord);
         final String newValue = hash(hashFunction, NON_EMPTY_FIELD_VALUE);
-        assertEquals(setNewValue(originalRecord, newValue), result);
+        assertThat(result).isEqualTo(setNewValue(originalRecord, newValue));
     }
 
     @ParameterizedTest
@@ -126,7 +125,7 @@ void noFieldName_EmptyStringValue(final String hashFunction) {
         final Hash transform = transformation(null, false, hashFunction);
         final SinkRecord result = transform.apply(originalRecord);
         final String newValue = hash(hashFunction, EMPTY_FIELD_VALUE);
-        assertEquals(setNewValue(originalRecord, newValue), result);
+        assertThat(result).isEqualTo(setNewValue(originalRecord, newValue));
     }
 
     @Test
@@ -135,10 +134,9 @@ void fieldName_NullValue_NoSkip() {
             .field(FIELD, SchemaBuilder.OPTIONAL_STRING_SCHEMA)
             .schema();
         final SinkRecord originalRecord = record(schema, new Struct(schema).put(FIELD, null));
-        final Throwable e = assertThrows(DataException.class,
-            () -> transformation(FIELD, false, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(FIELD + " in " + dataPlace() + " can't be null: " + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(FIELD, false, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(FIELD + " in " + dataPlace() + " can't be null: " + originalRecord);
     }
 
     @Test
@@ -146,10 +144,9 @@ void fieldName_MissingValue_NoSkip() {
         final Schema schema = SchemaBuilder.struct()
             .schema();
         final SinkRecord originalRecord = record(schema, new Struct(schema));
-        final Throwable e = assertThrows(DataException.class,
-            () -> transformation(FIELD, false, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(FIELD + " in " + dataPlace() + " schema can't be missing: " + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(FIELD, false, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(FIELD + " in " + dataPlace() + " schema can't be missing: " + originalRecord);
     }
 
     @Test
@@ -165,7 +162,7 @@ void fieldName_NullValue_Skip() {
         final Hash transform = transformation(FIELD, true, DEFAULT_HASH_FUNCTION);
         final SinkRecord result = transform.apply(originalRecord);
         // No changes.
-        assertEquals(originalRecord, result);
+        assertThat(result).isEqualTo(originalRecord);
     }
 
     @Test
@@ -176,17 +173,15 @@ void fieldName_MissingValue_Skip() {
         final Hash transform = transformation(FIELD, true, DEFAULT_HASH_FUNCTION);
         final SinkRecord result = transform.apply(originalRecord);
         // No changes.
-        assertEquals(originalRecord, result);
+        assertThat(result).isEqualTo(originalRecord);
     }
 
     @Test
     void fieldName_NonStruct() {
         final SinkRecord originalRecord = record(SchemaBuilder.INT8_SCHEMA, "some");
-        final Throwable e = assertThrows(DataException.class,
-            () -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(dataPlace() + " schema type must be STRUCT if field name is specified: "
-                + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(dataPlace() + " schema type must be STRUCT if field name is specified: " + originalRecord);
     }
 
     @Test
@@ -195,10 +190,9 @@ void fieldName_NullStruct() {
             .field(FIELD, SchemaBuilder.STRING_SCHEMA)
             .schema();
         final SinkRecord originalRecord = record(schema, null);
-        final Throwable e = assertThrows(DataException.class,
-            () -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(dataPlace() + " can't be null if field name is specified: " + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(dataPlace() + " can't be null if field name is specified: " + originalRecord);
     }
 
     @Test
@@ -209,11 +203,9 @@ void fieldName_UnsupportedTypeInField() {
             .schema();
         final SinkRecord originalRecord = record(
             schema, new Struct(schema).put(FIELD, new Struct(innerSchema)));
-        final Throwable e = assertThrows(DataException.class,
-            () -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord));
-        assertEquals(FIELD + " schema type in " + dataPlace() + " must be STRING: "
-                + originalRecord,
-            e.getMessage());
+        assertThatThrownBy(() -> transformation(FIELD, true, DEFAULT_HASH_FUNCTION).apply(originalRecord))
+            .isInstanceOf(DataException.class)
+            .hasMessage(FIELD + " schema type in " + dataPlace() + " must be STRING: " + originalRecord);
     }
 
     @ParameterizedTest
@@ -232,7 +224,7 @@ void fieldName_NormalStringValue(final String hashFunction) {
         final Struct newValue = new Struct(schema)
             .put(FIELD, hash(hashFunction, NON_EMPTY_FIELD_VALUE))
             .put(UNAFFECTED_FIELD, UNAFFECTED_FIELD_VALUE);
-        assertEquals(setNewValue(originalRecord, newValue), result);
+        assertThat(result).isEqualTo(setNewValue(originalRecord, newValue));
     }
 
     @ParameterizedTest
@@ -251,7 +243,7 @@ void fieldName_EmptyStringValue(final String hashFunction) {
         final Struct newValue = new Struct(schema)
             .put(FIELD, hash(hashFunction, EMPTY_FIELD_VALUE))
             .put(UNAFFECTED_FIELD, UNAFFECTED_FIELD_VALUE);
-        assertEquals(setNewValue(originalRecord, newValue), result);
+        assertThat(result).isEqualTo(setNewValue(originalRecord, newValue));
     }
 
     @ParameterizedTest
@@ -264,7 +256,7 @@ void sameValueSameHash(final String hashFunction) {
         final SinkRecord originalRecord = record(schema, NON_EMPTY_FIELD_VALUE);
         final SinkRecord result = transform.apply(originalRecord);
         final String newValue = hash(hashFunction, NON_EMPTY_FIELD_VALUE);
-        assertEquals(setNewValue(originalRecord, newValue), result);
+        assertThat(result).isEqualTo(setNewValue(originalRecord, newValue));
     }
 }
diff --git a/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerConfigTest.java b/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerConfigTest.java
index 02854ae..29bdb20 100644
--- a/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerConfigTest.java
+++ b/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerConfigTest.java
@@ -23,23 +23,17 @@
 import org.junit.jupiter.api.Test;
 import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 final class TombstoneHandlerConfigTest {
 
     @Test
     final void failOnUnknownBehaviorName() {
-        final Throwable t =
-            assertThrows(
-                ConfigException.class,
-                () -> new TombstoneHandlerConfig(newBehaviorProps("asdasdsadas"))
-            );
-        assertEquals(
-            "Invalid value asdasdsadas for configuration behavior: "
-                + "Unsupported behavior name: asdasdsadas. Supported are: drop_silent,drop_warn,fail",
-            t.getMessage()
-        );
+        assertThatThrownBy(() -> new TombstoneHandlerConfig(newBehaviorProps("asdasdsadas")))
+            .isInstanceOf(ConfigException.class)
+            .hasMessage("Invalid value asdasdsadas for configuration behavior: "
+                + "Unsupported behavior name: asdasdsadas. Supported are: drop_silent,drop_warn,fail");
     }
 
     @Test
@@ -51,7 +45,7 @@ final void acceptCorrectBehaviorNames() {
                     TombstoneHandlerConfig.Behavior.DROP_SILENT.name()
                 )
             );
-        assertEquals(TombstoneHandlerConfig.Behavior.DROP_SILENT, c.getBehavior());
+        assertThat(c.getBehavior()).isEqualTo(TombstoneHandlerConfig.Behavior.DROP_SILENT);
 
         c =
             new TombstoneHandlerConfig(
@@ -59,7 +53,7 @@ final void acceptCorrectBehaviorNames() {
                     TombstoneHandlerConfig.Behavior.FAIL.name().toLowerCase()
                 )
             );
-        assertEquals(TombstoneHandlerConfig.Behavior.FAIL, c.getBehavior());
+        assertThat(c.getBehavior()).isEqualTo(TombstoneHandlerConfig.Behavior.FAIL);
 
         c =
             new TombstoneHandlerConfig(
@@ -67,16 +61,14 @@ final void acceptCorrectBehaviorNames() {
                    "Drop_WArn"
                )
            );
-        assertEquals(TombstoneHandlerConfig.Behavior.DROP_WARN, c.getBehavior());
+        assertThat(c.getBehavior()).isEqualTo(TombstoneHandlerConfig.Behavior.DROP_WARN);
     }
 
     @Test
     final void failOnEmptyBehaviorName() {
-        final Throwable t = assertThrows(
-            ConfigException.class,
-            () -> new TombstoneHandlerConfig(newBehaviorProps(""))
-        );
-        assertEquals("Invalid value for configuration behavior: String must be non-empty", t.getMessage());
+        assertThatThrownBy(() -> new TombstoneHandlerConfig(newBehaviorProps("")))
+            .isInstanceOf(ConfigException.class)
+            .hasMessage("Invalid value for configuration behavior: String must be non-empty");
     }
 
     private Map newBehaviorProps(final String bv) {
diff --git a/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerTest.java b/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerTest.java
index c50e845..1fb5943 100644
--- a/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerTest.java
+++ b/src/test/java/io/aiven/kafka/connect/transforms/TombstoneHandlerTest.java
@@ -27,30 +27,22 @@
 import org.junit.jupiter.api.Test;
 import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 final class TombstoneHandlerTest {
 
     @Test
     final void shouldDropTombstoneRecord() {
-        assertNull(tombstoneHandler(Behavior.DROP_SILENT)
-            .apply(record(null))
-        );
+        assertThat(tombstoneHandler(Behavior.DROP_SILENT).apply(record(null)))
+            .isNull();
     }
 
     @Test
     final void shouldThrowDataAccessExceptionOnTombstoneRecords() {
-        final Throwable t = assertThrows(
-            DataException.class,
-            () -> tombstoneHandler(Behavior.FAIL)
-                .apply(record(null))
-        );
-        assertEquals(
-            "Tombstone record encountered, failing due to configured 'fail' behavior",
-            t.getMessage()
-        );
+        assertThatThrownBy(() -> tombstoneHandler(Behavior.FAIL).apply(record(null)))
+            .isInstanceOf(DataException.class)
+            .hasMessage("Tombstone record encountered, failing due to configured 'fail' behavior");
     }
 
     private SinkRecord record(final Object value) {
diff --git a/src/test/java/io/aiven/kafka/connect/transforms/utils/HexTest.java b/src/test/java/io/aiven/kafka/connect/transforms/utils/HexTest.java
index fb1a1f1..fd771ad 100644
--- a/src/test/java/io/aiven/kafka/connect/transforms/utils/HexTest.java
+++ b/src/test/java/io/aiven/kafka/connect/transforms/utils/HexTest.java
@@ -26,13 +26,13 @@
 
 import org.junit.jupiter.api.Test;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
 
 public class HexTest {
     @Test
     void testEncodeEmpty() {
         final byte[] bytes = new byte[0];
-        assertEquals("", Hex.encode(bytes));
+        assertThat(Hex.encode(bytes)).isEmpty();
     }
 
     @Test
@@ -41,7 +41,7 @@ void testEncodeSingleByte() {
         for (int i = 0; i < 256; i++) {
             final byte b = (byte) i;
             bytes[0] = b;
-            assertEquals(String.format("%02x", b), Hex.encode(bytes));
+            assertThat(Hex.encode(bytes)).isEqualTo(String.format("%02x", b));
         }
     }
 
@@ -53,7 +53,7 @@ void testEncodeFromStrings() throws IOException, URISyntaxException {
             // Use the string as a byte array and hex-encode it.
             final byte[] bytes = s.getBytes(Charset.defaultCharset());
             final String encoded = Hex.encode(bytes);
-            assertEquals(bytes.length * 2, encoded.length());
+            assertThat(encoded).hasSize(bytes.length * 2);
 
             // Decode the string back and compare to the original.
             final char[] encodedChars = encoded.toCharArray();
@@ -62,7 +62,7 @@ void testEncodeFromStrings() throws IOException, URISyntaxException {
                 final String s1 = new String(encodedChars, i, 2);
                 decodedBytes[i / 2] = (byte) Integer.parseInt(s1, 16);
             }
-            assertEquals(new String(decodedBytes, Charset.defaultCharset()), s);
+            assertThat(s).isEqualTo(new String(decodedBytes, Charset.defaultCharset()));
        }
    }
 }
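
For reference, the snippet below is an illustrative, standalone sketch (not part of the patch) of the two AssertJ idioms this migration converges on: assertThat(actual) followed by a chained check for plain assertions, and assertThatThrownBy(...).isInstanceOf(...).hasMessage(...) in place of assertThrows plus a separate message check. The ExampleAssertJStyleTest class and its sample values are hypothetical; it only assumes JUnit 5 and assertj-core on the test classpath.

// Hypothetical example class; illustrates the assertion style only.
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

class ExampleAssertJStyleTest {

    @Test
    void plainAssertions() {
        // AssertJ reads actual-first, unlike assertEquals(expected, actual).
        assertThat(String.format("%02x", 255)).isEqualTo("ff");
        assertThat("").isEmpty();
        assertThat("kafka").hasSize(5);
        assertThat((Object) null).isNull();
    }

    @Test
    void exceptionAssertions() {
        // Replaces assertThrows(...) followed by assertEquals on getMessage().
        assertThatThrownBy(() -> {
            throw new IllegalStateException("tombstone record encountered");
        })
            .isInstanceOf(IllegalStateException.class)
            .hasMessage("tombstone record encountered");
    }
}

A failed chained assertion reports the expected and actual values together in one message, which is the practical difference from the assertEquals and Hamcrest calls being removed here.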