diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerException.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerException.java index edbdd5b51..3826e1f79 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerException.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerException.java @@ -8,6 +8,8 @@ /** * Generic Parallel Consumer {@link RuntimeException} parent. + * + * @author Antony Stubbs */ @StandardException public class ParallelConsumerException extends RuntimeException { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerOptions.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerOptions.java index 5d871da7f..ca12e8982 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerOptions.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/ParallelConsumerOptions.java @@ -25,6 +25,7 @@ /** * The options for the {@link AbstractParallelEoSStreamProcessor} system. 
* + * @author Antony Stubbs * @see #builder() * @see ParallelConsumerOptions.ParallelConsumerOptionsBuilder */ diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncoder.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncoder.java index 64e607de1..236b605ab 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncoder.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncoder.java @@ -37,6 +37,7 @@ * @see PartitionState#incompleteOffsets * @see RunLengthEncoder * @see OffsetBitSet + * @author Antony Stubbs */ @Slf4j public class BitSetEncoder extends OffsetEncoder { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncodingNotSupportedException.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncodingNotSupportedException.java index 0a0e1c340..b0b7b98d5 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncodingNotSupportedException.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/BitSetEncodingNotSupportedException.java @@ -1,12 +1,16 @@ package io.confluent.parallelconsumer.offsets; /*- - * Copyright (C) 2020-2021 Confluent, Inc. + * Copyright (C) 2020-2022 Confluent, Inc. */ -public class BitSetEncodingNotSupportedException extends EncodingNotSupportedException { - public BitSetEncodingNotSupportedException(String msg) { - super(msg); - } +import lombok.experimental.StandardException; +/** + * Thrown under situations where the {@link BitSetEncoder} would not be able to encode the given data. 
+ * + * @author Antony Stubbs + */ +@StandardException +public class BitSetEncodingNotSupportedException extends EncodingNotSupportedException { } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/ByteBufferEncoder.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/ByteBufferEncoder.java index 7459443b0..7b8f56351 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/ByteBufferEncoder.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/ByteBufferEncoder.java @@ -9,6 +9,12 @@ import static io.confluent.parallelconsumer.offsets.OffsetEncoding.ByteArray; import static io.confluent.parallelconsumer.offsets.OffsetEncoding.ByteArrayCompressed; +/** + * Encodes offsets into a {@link ByteBuffer}. Doesn't have any advantage over the {@link BitSetEncoder} and + * {@link RunLengthEncoder}, but can be useful for testing and comparison. + * + * @author Antony Stubbs + */ public class ByteBufferEncoder extends OffsetEncoder { private final ByteBuffer bytesBuffer; diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodedOffsetPair.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodedOffsetPair.java index b25774f79..609b90cd2 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodedOffsetPair.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodedOffsetPair.java @@ -23,6 +23,10 @@ import static io.confluent.parallelconsumer.offsets.OffsetSimpleSerialisation.deserialiseByteArrayToBitMapString; /** + * Encapsulates the encoding type, and the actual encoded data, when creating an offset map encoding. Central place for + * decoding the data. 
+ * + * @author Antony Stubbs * @see #unwrap */ @Slf4j @@ -110,7 +114,8 @@ public HighestOffsetAndIncompletes getDecodedIncompletes(long baseOffset) { case BitSetV2Compressed -> deserialiseBitSetWrapToIncompletes(BitSetV2, baseOffset, decompressZstd(data)); case RunLengthV2 -> runLengthDecodeToIncompletes(encoding, baseOffset, data); case RunLengthV2Compressed -> runLengthDecodeToIncompletes(RunLengthV2, baseOffset, decompressZstd(data)); - default -> throw new UnsupportedOperationException("Encoding (" + encoding.description() + ") not supported"); + default -> + throw new UnsupportedOperationException("Encoding (" + encoding.description() + ") not supported"); }; return binaryArrayString; } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodingNotSupportedException.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodingNotSupportedException.java index 378f9bd11..58097d6ea 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodingNotSupportedException.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/EncodingNotSupportedException.java @@ -5,12 +5,13 @@ */ import io.confluent.parallelconsumer.internal.InternalException; +import lombok.experimental.StandardException; -/*- - * Copyright (C) 2020-2021 Confluent, Inc. +/** + * Parent of the exceptions for when the {@link OffsetEncoder} cannot encode the given data. 
+ * + * @author Antony Stubbs */ +@StandardException public class EncodingNotSupportedException extends InternalException { - public EncodingNotSupportedException(final String message) { - super(message); - } } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/NoEncodingPossibleException.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/NoEncodingPossibleException.java index d7cce4a70..2b33ab767 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/NoEncodingPossibleException.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/NoEncodingPossibleException.java @@ -5,10 +5,13 @@ */ import io.confluent.parallelconsumer.internal.InternalException; +import lombok.experimental.StandardException; +/** + * Thrown when, for whatever reason, no encoding of the offsets is possible. + * + * @author Antony Stubbs + */ +@StandardException public class NoEncodingPossibleException extends InternalException { - - public NoEncodingPossibleException(String msg) { - super(msg); - } } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetBitSet.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetBitSet.java index 0058a4994..cd846f139 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetBitSet.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetBitSet.java @@ -19,6 +19,7 @@ *

* todo unify or refactor with {@link BitSetEncoder}. Why was it ever seperate? * + * @author Antony Stubbs * @see BitSetEncoder */ @Slf4j diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetDecodingError.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetDecodingError.java index d69d6aec5..2a1a8f17e 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetDecodingError.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetDecodingError.java @@ -5,14 +5,15 @@ */ import io.confluent.parallelconsumer.internal.InternalException; +import lombok.experimental.StandardException; /*- * Error decoding offsets * * TODO should extend java.lang.Error ? + * + * @author Antony Stubbs */ +@StandardException public class OffsetDecodingError extends InternalException { - public OffsetDecodingError(final String s, final IllegalArgumentException a) { - super(s, a); - } } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetEncoder.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetEncoder.java index 76f6ba7ef..6a97cbcee 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetEncoder.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetEncoder.java @@ -11,7 +11,9 @@ import java.nio.ByteBuffer; /** - * Base OffsetEncoder + * Base OffsetEncoder, defining the contract for encoding offset data. 
+ * + * @author Antony Stubbs */ @Slf4j public abstract class OffsetEncoder { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetMapCodecManager.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetMapCodecManager.java index e71377a29..cd4010775 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetMapCodecManager.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetMapCodecManager.java @@ -34,6 +34,8 @@ *

  • byte2-3: Short: bitset size *
  • byte4-n: serialised {@link BitSet} * + * + * @author Antony Stubbs */ @Slf4j public class OffsetMapCodecManager { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetRunLength.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetRunLength.java index 92fe44cc6..e0784c16f 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetRunLength.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetRunLength.java @@ -1,7 +1,7 @@ package io.confluent.parallelconsumer.offsets; /*- - * Copyright (C) 2020-2021 Confluent, Inc. + * Copyright (C) 2020-2022 Confluent, Inc. */ import io.confluent.parallelconsumer.offsets.OffsetMapCodecManager.HighestOffsetAndIncompletes; import lombok.experimental.UtilityClass; @@ -18,6 +18,11 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; +/** + * Methods for encoding and decoding the run-lengths. + * + * @author Antony Stubbs + */ @Slf4j @UtilityClass public class OffsetRunLength { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimpleSerialisation.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimpleSerialisation.java index 7f9d4ae4a..cac345862 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimpleSerialisation.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimpleSerialisation.java @@ -1,7 +1,7 @@ package io.confluent.parallelconsumer.offsets; /*- - * Copyright (C) 2020-2021 Confluent, Inc. + * Copyright (C) 2020-2022 Confluent, Inc. 
*/ import com.github.luben.zstd.ZstdInputStream; import com.github.luben.zstd.ZstdOutputStream; @@ -22,6 +22,11 @@ import static io.confluent.csid.utils.BackportUtils.readFully; +/** + * Methods for compressing, decompressing and encoding / decoding data. + * + * @author Antony Stubbs + */ @UtilityClass @Slf4j public class OffsetSimpleSerialisation { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimultaneousEncoder.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimultaneousEncoder.java index 91e3d290a..2fab0344e 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimultaneousEncoder.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/OffsetSimultaneousEncoder.java @@ -23,6 +23,7 @@ *

    * Have results in an accessible structure, easily selecting the highest compression. * + * @author Antony Stubbs * @see #invoke() */ @Slf4j diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunLengthEncoder.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunLengthEncoder.java index 9138db719..396159d59 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunLengthEncoder.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunLengthEncoder.java @@ -20,6 +20,8 @@ *

    * One such nature is that gaps between completed offsets get encoded as succeeded offsets. This doesn't matter because * they don't exist and we'll neve see them (they no longer exist in the source partition). + * + * @author Antony Stubbs */ public class RunLengthEncoder extends OffsetEncoder { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunlengthV1EncodingNotSupported.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunlengthV1EncodingNotSupported.java index 56360a57c..c333b7743 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunlengthV1EncodingNotSupported.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/offsets/RunlengthV1EncodingNotSupported.java @@ -1,10 +1,16 @@ package io.confluent.parallelconsumer.offsets; /*- - * Copyright (C) 2020-2021 Confluent, Inc. + * Copyright (C) 2020-2022 Confluent, Inc. */ + +import lombok.experimental.StandardException; + +/** + * Thrown when Runlength V1 encoding is not supported. + * + * @author Antony Stubbs + */ +@StandardException public class RunlengthV1EncodingNotSupported extends EncodingNotSupportedException { - public RunlengthV1EncodingNotSupported(final String msg) { - super(msg); - } } diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionState.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionState.java index d231130c8..b3e8309e5 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionState.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionState.java @@ -32,6 +32,7 @@ /** * Our view of the state of the partitions that we've been assigned. * + * @author Antony Stubbs * @see PartitionStateManager */ // todo class becoming large - possible to extract some functionality? 
diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionStateManager.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionStateManager.java index d8fad68fc..afc9a3e2b 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionStateManager.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/PartitionStateManager.java @@ -29,6 +29,7 @@ *

    * This state is shared between the {@link BrokerPollSystem} thread and the {@link AbstractParallelEoSStreamProcessor}. * + * @author Antony Stubbs * @see PartitionState */ @Slf4j diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ProcessingShard.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ProcessingShard.java index de3dbd11c..a72187daf 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ProcessingShard.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ProcessingShard.java @@ -24,6 +24,8 @@ /** * Models the queue of work to be processed, based on the {@link ProcessingOrder} modes. + * + * @author Antony Stubbs */ @Slf4j @RequiredArgsConstructor diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/RemovedPartitionState.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/RemovedPartitionState.java index 8fd3133f9..bef1475cc 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/RemovedPartitionState.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/RemovedPartitionState.java @@ -28,6 +28,8 @@ * leak as the collection will forever expand. However, even massive partition counts to a single consumer would be in * the hundreds of thousands, this would only result in hundreds of thousands of {@link TopicPartition} object keys all * pointing to the same instance of {@link RemovedPartitionState}. 
+ * + * @author Antony Stubbs */ @Slf4j public class RemovedPartitionState extends PartitionState { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ShardManager.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ShardManager.java index 9c0ef112b..2fe4b7ed0 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ShardManager.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/ShardManager.java @@ -34,6 +34,8 @@ * This state is shared between the {@link BrokerPollSystem} thread (write - adding and removing shards and work) and * the {@link AbstractParallelEoSStreamProcessor} Controller thread (read - how many records are in the shards?), so * must be thread safe. + * + * @author Antony Stubbs */ @Slf4j public class ShardManager { diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkContainer.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkContainer.java index 25071d0c1..7376afa2f 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkContainer.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkContainer.java @@ -24,7 +24,9 @@ import static java.util.Optional.of; /** - * Model object for metadata around processing state of {@link ConsumerRecord}s. + * Context object for a given {@link ConsumerRecord}, carrying completion status, various time stamps, retry data etc.
+ * + * @author Antony Stubbs */ @Slf4j @EqualsAndHashCode diff --git a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkManager.java b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkManager.java index 20187c549..c69cbee2e 100644 --- a/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkManager.java +++ b/parallel-consumer-core/src/main/java/io/confluent/parallelconsumer/state/WorkManager.java @@ -23,7 +23,7 @@ /** * Sharded, prioritised, offset managed, order controlled, delayed work queue. *

    - * Low Water Mark - the highest offset (continuously successful) with all it's previous messages succeeded (the offset + * Low Watermark - the highest offset (continuously successful) with all its previous messages succeeded (the offset * one commits to broker) *

    * High Water Mark - the highest offset which has succeeded (previous may be incomplete) @@ -32,8 +32,7 @@ *

    * This state is shared between the {@link BrokerPollSystem} thread and the {@link AbstractParallelEoSStreamProcessor}. * - * @param - * @param + * @author Antony Stubbs */ @Slf4j public class WorkManager implements ConsumerRebalanceListener { diff --git a/parallel-consumer-core/src/test-integration/java/io/confluent/parallelconsumer/integrationTests/utils/KafkaClientUtils.java b/parallel-consumer-core/src/test-integration/java/io/confluent/parallelconsumer/integrationTests/utils/KafkaClientUtils.java index b2ad82a63..247f01b6e 100644 --- a/parallel-consumer-core/src/test-integration/java/io/confluent/parallelconsumer/integrationTests/utils/KafkaClientUtils.java +++ b/parallel-consumer-core/src/test-integration/java/io/confluent/parallelconsumer/integrationTests/utils/KafkaClientUtils.java @@ -35,7 +35,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; -import static com.google.common.truth.Truth.assertThat; import static io.confluent.parallelconsumer.ParallelConsumerOptions.CommitMode.PERIODIC_CONSUMER_ASYNCHRONOUS; import static io.confluent.parallelconsumer.ParallelConsumerOptions.CommitMode.PERIODIC_TRANSACTIONAL_PRODUCER; import static io.confluent.parallelconsumer.integrationTests.utils.KafkaClientUtils.ProducerMode.NOT_TRANSACTIONAL; @@ -43,6 +42,7 @@ import static java.time.Duration.ofSeconds; import static java.util.Optional.empty; import static org.apache.commons.lang3.RandomUtils.nextInt; +import static org.assertj.core.api.Assertions.assertThat; /** * Utilities for creating and manipulating clients @@ -86,7 +86,6 @@ class PCVersion { */ private KafkaConsumer lastConsumerConstructed; - public KafkaClientUtils(KafkaContainer kafkaContainer) { kafkaContainer.addEnv("KAFKA_transaction_state_log_replication_factor", "1"); kafkaContainer.addEnv("KAFKA_transaction_state_log_min_isr", "1"); @@ -337,4 +336,5 @@ public ParallelEoSStreamProcessor buildPc(ProcessingOrder key) { public KafkaConsumer 
getLastConsumerConstructed() { return lastConsumerConstructed; } + } diff --git a/parallel-consumer-core/src/test/java/io/confluent/csid/utils/LongPollingMockConsumer.java b/parallel-consumer-core/src/test/java/io/confluent/csid/utils/LongPollingMockConsumer.java index b1977034d..21264c24c 100644 --- a/parallel-consumer-core/src/test/java/io/confluent/csid/utils/LongPollingMockConsumer.java +++ b/parallel-consumer-core/src/test/java/io/confluent/csid/utils/LongPollingMockConsumer.java @@ -26,8 +26,7 @@ * Used in tests to stub out the behaviour of the real Broker and Client's long polling system (the mock Kafka Consumer * doesn't have this behaviour). * - * @param - * @param + * @author Antony Stubbs */ @ToString @Slf4j diff --git a/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/CommitHistorySubject.java b/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/CommitHistorySubject.java index a86988fb0..4ce55b208 100644 --- a/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/CommitHistorySubject.java +++ b/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/CommitHistorySubject.java @@ -11,6 +11,8 @@ import io.stubbs.truth.generator.SubjectFactoryMethod; import io.stubbs.truth.generator.UserManagedSubject; import lombok.ToString; +import io.stubbs.truth.generator.SubjectFactoryMethod; +import io.stubbs.truth.generator.UserManagedSubject; import java.util.Arrays; import java.util.Optional; diff --git a/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/TruthGeneratorTests.java b/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/TruthGeneratorTests.java index 1532df721..2fe66919d 100644 --- a/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/TruthGeneratorTests.java +++ b/parallel-consumer-core/src/test/java/io/confluent/parallelconsumer/truth/TruthGeneratorTests.java @@ -17,6 +17,8 @@ /** * Basic tests of simple 
usage of the Truth Generator maven plugin + * + * @author Antony Stubbs */ class TruthGeneratorTests {