diff --git a/buildSrc/src/main/kotlin/Libs.kt b/buildSrc/src/main/kotlin/Libs.kt index cf0d73359..d48d4a625 100644 --- a/buildSrc/src/main/kotlin/Libs.kt +++ b/buildSrc/src/main/kotlin/Libs.kt @@ -97,7 +97,7 @@ object Libs { } object Pulsar { - const val version = "3.0.6" + const val version = "3.0.7" const val client = "org.apache.pulsar:pulsar-client:$version" const val clientAdmin = "org.apache.pulsar:pulsar-client-admin:$version" const val functions = "org.apache.pulsar:pulsar-functions-api:$version" diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/InfiniticClient.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/InfiniticClient.kt index 2684b3596..4ef109db1 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/InfiniticClient.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/InfiniticClient.kt @@ -35,9 +35,9 @@ import io.infinitic.common.proxies.RequestByWorkflowId import io.infinitic.common.proxies.RequestByWorkflowTag import io.infinitic.common.tasks.data.ServiceName import io.infinitic.common.tasks.data.TaskId -import io.infinitic.common.transport.logged.LoggedInfiniticConsumer -import io.infinitic.common.transport.logged.LoggedInfiniticProducer -import io.infinitic.common.transport.logged.LoggedInfiniticResources +import io.infinitic.common.transport.InfiniticConsumer +import io.infinitic.common.transport.InfiniticProducer +import io.infinitic.common.transport.InfiniticResources import io.infinitic.common.utils.annotatedName import io.infinitic.common.workflows.data.workflowMethods.WorkflowMethodId import io.infinitic.common.workflows.data.workflows.WorkflowMeta @@ -63,16 +63,12 @@ class InfiniticClient( val config: InfiniticClientConfigInterface ) : InfiniticClientInterface { - private val resources by lazy { - LoggedInfiniticResources(logger, config.transport.resources) - } - private val consumer by lazy { - LoggedInfiniticConsumer(logger, config.transport.consumer) - } - private val producer by lazy { - LoggedInfiniticProducer(logger, config.transport.producer).apply { - config.name?.let { setSuggestedName(it) } - } + private val resources: InfiniticResources by lazy { config.transport.resources } + + private val consumer: InfiniticConsumer by lazy { config.transport.consumer } + + private val producer: InfiniticProducer by lazy { + config.transport.producer.apply { config.name?.let { setSuggestedName(it) } } } private val shutdownGracePeriodSeconds = config.transport.shutdownGracePeriodSeconds @@ -82,7 +78,7 @@ class InfiniticClient( // Scope used to asynchronously send message, and also to consumes messages internal val clientScope = CoroutineScope(Dispatchers.IO) - private val dispatcher by lazy { ClientDispatcher(clientScope, consumer, producer, logger) } + private val dispatcher by lazy { ClientDispatcher(clientScope, consumer, producer) } override suspend fun getName() = producer.getName() @@ -202,20 +198,21 @@ class InfiniticClient( /** get ids of a stub, associated to a specific tag */ - override fun getIds(stub: T): Set = - when (val handler = getProxyHandler(stub)) { - is ExistingWorkflowProxyHandler -> when (handler.requestBy) { - is RequestByWorkflowTag -> dispatcher.getWorkflowIdsByTag( - handler.workflowName, - (handler.requestBy as RequestByWorkflowTag).workflowTag, - ) - - is RequestByWorkflowId -> throw InvalidIdTagSelectionException("$stub") - } - - else -> throw InvalidStubException("$stub") + override fun getIds(stub: T): Set = runBlocking { + when (val handler = getProxyHandler(stub)) { + is 
ExistingWorkflowProxyHandler -> when (handler.requestBy) { + is RequestByWorkflowTag -> dispatcher.getWorkflowIdsByTag( + handler.workflowName, + (handler.requestBy as RequestByWorkflowTag).workflowTag, + ) + + is RequestByWorkflowId -> throw InvalidIdTagSelectionException("$stub") } + else -> throw InvalidStubException("$stub") + } + } + override fun startAsync(invoke: () -> R): CompletableFuture> { val handler = ProxyHandler.async(invoke) ?: throw InvalidStubException() @@ -270,7 +267,7 @@ class InfiniticClient( companion object { - private val logger = KotlinLogging.logger {} + internal val logger = KotlinLogging.logger {} /** Create InfiniticClient with config from resources directory */ @JvmStatic diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredChannel.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredChannel.kt index 8374139f5..b11cfa1fe 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredChannel.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredChannel.kt @@ -39,7 +39,7 @@ class DeferredChannel> internal constructor( thisShouldNotHappen() } - override fun await(): R = channel + override suspend fun await(): R = channel override val id: String get() { diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredSend.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredSend.kt index fafdd8a32..e330a45a9 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredSend.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/DeferredSend.kt @@ -44,7 +44,7 @@ class DeferredSend internal constructor( // in order to send asynchronously the message // despite the synchronous syntax: workflow.channel @Suppress("UNCHECKED_CAST") - override fun await(): R = Unit as R + override suspend fun await(): R = Unit as R override val id: String = signalId.toString() diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/ExistingDeferredWorkflow.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/ExistingDeferredWorkflow.kt index 8922bcad9..681df34a3 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/ExistingDeferredWorkflow.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/ExistingDeferredWorkflow.kt @@ -53,7 +53,7 @@ class ExistingDeferredWorkflow internal constructor( // this method retries workflowTask (unique for a workflow instance) override fun retryAsync() = dispatcher.retryWorkflowTaskAsync(workflowName, requestBy) - override fun await(): R = dispatcher.awaitExistingWorkflow(this, true) + override suspend fun await(): R = dispatcher.awaitExistingWorkflow(this, true) override val id by lazy { when (requestBy) { diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/NewDeferredWorkflow.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/NewDeferredWorkflow.kt index 7bf19f1f0..766b4ff2b 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/NewDeferredWorkflow.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/deferred/NewDeferredWorkflow.kt @@ -50,7 +50,7 @@ class NewDeferredWorkflow internal constructor( override fun retryAsync() = dispatcher.retryWorkflowTaskAsync(workflowName, RequestByWorkflowId(workflowId)) - override fun await(): R = dispatcher.awaitNewWorkflow(this, true) + override suspend fun await(): R = 
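// The await() overrides above become suspend functions, and blocking entry points such as
// getIds and dispatchAndWait bridge to them with runBlocking. A minimal sketch of that
// pattern, assuming a simplified Deferred-like interface (names below are illustrative,
// not the project's actual API):
import kotlinx.coroutines.runBlocking

interface SimpleDeferred<R> {
    suspend fun await(): R                      // the new suspending contract
}

// adapter for callers that are not inside a coroutine, as the client's blocking API does
fun <R> SimpleDeferred<R>.awaitBlocking(): R = runBlocking { await() }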
dispatcher.awaitNewWorkflow(this, true) override val id: String = workflowId.toString() diff --git a/infinitic-client/src/main/kotlin/io/infinitic/clients/dispatcher/ClientDispatcher.kt b/infinitic-client/src/main/kotlin/io/infinitic/clients/dispatcher/ClientDispatcher.kt index b302531eb..44a2c9dc2 100644 --- a/infinitic-client/src/main/kotlin/io/infinitic/clients/dispatcher/ClientDispatcher.kt +++ b/infinitic-client/src/main/kotlin/io/infinitic/clients/dispatcher/ClientDispatcher.kt @@ -24,6 +24,7 @@ package io.infinitic.clients.dispatcher import io.github.oshai.kotlinlogging.KLogger import io.infinitic.clients.Deferred +import io.infinitic.clients.InfiniticClient import io.infinitic.clients.deferred.DeferredChannel import io.infinitic.clients.deferred.DeferredSend import io.infinitic.clients.deferred.ExistingDeferredWorkflow @@ -105,7 +106,6 @@ import kotlinx.coroutines.runBlocking import org.jetbrains.annotations.TestOnly import java.lang.reflect.Method import java.util.concurrent.CompletableFuture -import java.util.concurrent.CompletionException import java.util.concurrent.atomic.AtomicBoolean import io.infinitic.common.workflows.engine.messages.RetryTasks as RetryTaskInWorkflow import io.infinitic.common.workflows.tags.messages.RetryTasksByTag as RetryTaskInWorkflowByTag @@ -115,7 +115,6 @@ internal class ClientDispatcher( private val clientScope: CoroutineScope, private val consumer: InfiniticConsumer, private val producer: InfiniticProducer, - private val logger: KLogger ) : ProxyDispatcher { // Name of the client @@ -146,7 +145,7 @@ internal class ClientDispatcher( // asynchronous call: dispatch(stub::method)(*args) fun dispatchAsync(handler: ProxyHandler<*>): CompletableFuture> = - clientScope.runAsync { + runAsync { when (handler) { is NewWorkflowProxyHandler -> handler.dispatchMethod() is ExistingWorkflowProxyHandler -> handler.dispatchMethod() @@ -157,14 +156,15 @@ internal class ClientDispatcher( } // synchronous call: stub.method(*args) - override fun dispatchAndWait(handler: ProxyHandler<*>): R = - when (handler) { - is NewWorkflowProxyHandler -> handler.dispatchMethodAndWait() - is ExistingWorkflowProxyHandler -> handler.dispatchMethodAndWait() - is ChannelProxyHandler -> handler.dispatchSignal().await() - is ExistingServiceProxyHandler -> thisShouldNotHappen() - is NewServiceProxyHandler -> thisShouldNotHappen() - } + override fun dispatchAndWait(handler: ProxyHandler<*>): R = runBlocking { + when (handler) { + is NewWorkflowProxyHandler -> handler.dispatchMethodAndWait() + is ExistingWorkflowProxyHandler -> handler.dispatchMethodAndWait() + is ChannelProxyHandler -> handler.dispatchSignal().await() + is ExistingServiceProxyHandler -> thisShouldNotHappen() + is NewServiceProxyHandler -> thisShouldNotHappen() + } + } private suspend fun awaitNewWorkflowAsync( deferred: NewDeferredWorkflow, @@ -178,17 +178,15 @@ internal class ClientDispatcher( clientWaiting, ) - internal fun awaitNewWorkflow( + internal suspend fun awaitNewWorkflow( deferred: NewDeferredWorkflow, clientWaiting: Boolean - ): T = clientScope.run { - awaitNewWorkflowAsync(deferred, clientWaiting).await().getValue( - deferred.workflowName, - deferred.workflowId, - deferred.method, - WorkflowMethodId.from(deferred.workflowId), - ) - } + ): T = awaitNewWorkflowAsync(deferred, clientWaiting).await().getValue( + deferred.workflowName, + deferred.workflowId, + deferred.method, + WorkflowMethodId.from(deferred.workflowId), + ) private suspend fun awaitExistingWorkflowAsync( deferred: ExistingDeferredWorkflow, @@ 
-206,27 +204,25 @@ internal class ClientDispatcher( is RequestByWorkflowTag -> TODO() } - internal fun awaitExistingWorkflow( + internal suspend fun awaitExistingWorkflow( deferred: ExistingDeferredWorkflow, clientWaiting: Boolean - ): T = clientScope.run { - when (deferred.requestBy) { - is RequestByWorkflowId -> awaitWorkflowAsync( - deferred.workflowName, - deferred.requestBy.workflowId, - deferred.workflowMethodId, - deferred.methodTimeout, - deferred.dispatchTime, - clientWaiting, - ).await().getValue( - deferred.workflowName, - deferred.requestBy.workflowId, - deferred.method, - deferred.workflowMethodId, - ) as T - - is RequestByWorkflowTag -> TODO() - } + ): T = when (deferred.requestBy) { + is RequestByWorkflowId -> awaitWorkflowAsync( + deferred.workflowName, + deferred.requestBy.workflowId, + deferred.workflowMethodId, + deferred.methodTimeout, + deferred.dispatchTime, + clientWaiting, + ).await().getValue( + deferred.workflowName, + deferred.requestBy.workflowId, + deferred.method, + deferred.workflowMethodId, + ) as T + + is RequestByWorkflowTag -> TODO() } // wait for the completion of a method @@ -273,7 +269,7 @@ internal class ClientDispatcher( workflowName: WorkflowName, requestBy: RequestBy, workflowMethodId: WorkflowMethodId?, - ): CompletableFuture = clientScope.runAsync { + ): CompletableFuture = runAsync { when (requestBy) { is RequestByWorkflowId -> { val msg = CancelWorkflow( @@ -307,7 +303,7 @@ internal class ClientDispatcher( fun retryWorkflowTaskAsync( workflowName: WorkflowName, requestBy: RequestBy - ): CompletableFuture = clientScope.runAsync { + ): CompletableFuture = runAsync { when (requestBy) { is RequestByWorkflowId -> { val msg = RetryWorkflowTask( @@ -339,7 +335,7 @@ internal class ClientDispatcher( serviceName: ServiceName, taskId: TaskId, returnValue: MethodReturnValue - ): CompletableFuture = clientScope.runAsync { + ): CompletableFuture = runAsync { val msg = CompleteDelegatedTask( serviceName = serviceName, taskId = taskId, @@ -353,7 +349,7 @@ internal class ClientDispatcher( workflowName: WorkflowName, requestBy: RequestBy, workflowMethodId: WorkflowMethodId? - ): CompletableFuture = clientScope.runAsync { + ): CompletableFuture = runAsync { when (requestBy) { is RequestByWorkflowId -> { val msg = CompleteTimers( @@ -389,7 +385,7 @@ internal class ClientDispatcher( taskId: TaskId?, taskStatus: DeferredStatus?, serviceName: ServiceName? 
- ): CompletableFuture = clientScope.runAsync { + ): CompletableFuture = runAsync { when (requestBy) { is RequestByWorkflowId -> { val msg = RetryTaskInWorkflow( @@ -423,11 +419,10 @@ internal class ClientDispatcher( } } - - fun getWorkflowIdsByTag( + suspend fun getWorkflowIdsByTag( workflowName: WorkflowName, workflowTag: WorkflowTag - ): Set = clientScope.run { + ): Set { // lazily starts client consumer if not already started and waits val waiting = awaitAsync { (it is WorkflowIdsByTag) && @@ -447,7 +442,7 @@ internal class ClientDispatcher( val workflowIdsByTag = waiting.await() as WorkflowIdsByTag - workflowIdsByTag.workflowIds.map { it.toString() }.toSet() + return workflowIdsByTag.workflowIds.map { it.toString() }.toSet() } private suspend fun NewWorkflowProxyHandler<*>.dispatchMethod(): Deferred = @@ -467,7 +462,7 @@ internal class ClientDispatcher( } // synchronous call: stub.method(*args) - private fun NewWorkflowProxyHandler<*>.dispatchMethodAndWait(): R = + private suspend fun NewWorkflowProxyHandler<*>.dispatchMethodAndWait(): R = when (isChannelGetter()) { true -> throw InvalidChannelUsageException() false -> { @@ -479,19 +474,17 @@ internal class ClientDispatcher( getTimeout(), ) - clientScope.run { - val future = awaitNewWorkflowAsync(deferredWorkflow, false) + val future = awaitNewWorkflowAsync(deferredWorkflow, false) - // synchronously send the message to get errors - dispatchMethod(deferredWorkflow, true) + // synchronously send the message to get errors + dispatchMethod(deferredWorkflow, true) - future.await().getValue( - workflowName, - deferredWorkflow.workflowId, - deferredWorkflow.method, - WorkflowMethodId.from(deferredWorkflow.workflowId), - ) as R - } + future.await().getValue( + workflowName, + deferredWorkflow.workflowId, + deferredWorkflow.method, + WorkflowMethodId.from(deferredWorkflow.workflowId), + ) as R } } @@ -598,7 +591,7 @@ internal class ClientDispatcher( // synchronous call: stub.method(*args) @Suppress("UNCHECKED_CAST") - private fun ExistingWorkflowProxyHandler<*>.dispatchMethodAndWait(): R = + private suspend fun ExistingWorkflowProxyHandler<*>.dispatchMethodAndWait(): R = when (isChannelGetter()) { true -> { // special case of getting a channel from a workflow @@ -614,19 +607,17 @@ internal class ClientDispatcher( getTimeout(), ) - clientScope.run { - val future = awaitExistingWorkflowAsync(deferred, false) + val future = awaitExistingWorkflowAsync(deferred, false) - // send the message synchronously to get errors - dispatchMethod(deferred, true) + // send the message synchronously to get errors + dispatchMethod(deferred, true) - future.await().getValue( - deferred.workflowName, - deferred.requestBy.workflowId!!, - deferred.method, - deferred.workflowMethodId, - ) as R - } + future.await().getValue( + deferred.workflowName, + deferred.requestBy.workflowId!!, + deferred.method, + deferred.workflowMethodId, + ) as R } } @@ -685,12 +676,12 @@ internal class ClientDispatcher( } // asynchronous call: dispatch(stub.channel::send, signal) - private fun ChannelProxyHandler<*>.dispatchSignal() = + private suspend fun ChannelProxyHandler<*>.dispatchSignal() = deferredSend().also { dispatchSignal(it) } - private fun ChannelProxyHandler<*>.dispatchSignal( + private suspend fun ChannelProxyHandler<*>.dispatchSignal( deferredSend: DeferredSend<*>, - ) = clientScope.run { + ) { if (annotatedMethodName.toString() != SendChannel<*>::send.name) thisShouldNotHappen() when (requestBy) { @@ -785,21 +776,40 @@ internal class ClientDispatcher( else -> 
thisShouldNotHappen("Unexpected $this") } + + /** + * Starts listening asynchronously for incoming messages for a specific client. + * + * This method ensures that a consumer client, identified by `emitterName`, is created and started + * only once. It launches a coroutine to handle the listening process and ensures that messages + * are emitted to the response flow or any errors are propagated appropriately. + * + * This function must be called with an active CoroutineScope and KLogger context. + * + * The function performs the following tasks: + * - It checks if the consumer client has already started using `hasClientConsumerStarted`. + * - Logs the starting message for the client using `KLogger`. + * - Invokes `consumer.startAsync` to start listening to the specified subscription synchronously. + * - Launches a coroutine to handle the asynchronous listening process, which waits for messages + * and emits them to `responseFlow`. + * - Catches any exceptions during message processing and emits them to the `responseFlow` before rethrowing. + * + * This function suspends until the consumer client is successfully started and the listening coroutine is launched. + */ + context(CoroutineScope, KLogger) private suspend fun startListeningAsync() { if (hasClientConsumerStarted.compareAndSet(false, true)) { - logger.info { "Starting consumer client for client $emitterName" } + info { "Starting consumer client for client $emitterName" } // synchronously make sure that the consumer is created and started - val listenerJob = with(clientScope) { - consumer.startAsync( - subscription = MainSubscription(ClientTopic), - entity = emitterName.toString(), - concurrency = 1, - process = { message, _ -> responseFlow.emit(message) }, - beforeDlq = null, - ) - } + val listenerJob = + consumer.startAsync( + subscription = MainSubscription(ClientTopic), + entity = emitterName.toString(), + concurrency = 1, + process = { message, _ -> responseFlow.emit(message) }, + ) // asynchronously listen - clientScope.launch { + launch { try { listenerJob.join() } catch (e: Exception) { @@ -811,21 +821,28 @@ internal class ClientDispatcher( } } + /** + * Awaits until a `ClientMessage` that matches the given predicate is received + * or the timeout is reached, in this case response will be null + * Ensures that the client is listening for messages before awaiting. + * + * @param timeout The maximum time to wait for the message. Defaults to `Long.MAX_VALUE`. + * @param predicate A suspend function that determines if a received `ClientMessage` matches the criteria. + * @return A `CoroutineDeferred` that will provide the matching `ClientMessage` or `null` if the timeout is reached. 
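// A minimal sketch of the behaviour documented above: wait for the first message matching a
// predicate, or give up and return null once the timeout elapses. The project's
// responseFlow.first(timeout) { ... } helper is assumed to behave like this; the extension
// below is illustrative only.
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.withTimeoutOrNull

suspend fun <T> Flow<T>.firstOrNullWithin(
    timeoutMillis: Long,
    predicate: suspend (T) -> Boolean,
): T? = withTimeoutOrNull(timeoutMillis) { first { predicate(it) } }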
+ */ private suspend fun awaitAsync( timeout: Long = Long.MAX_VALUE, predicate: suspend (ClientMessage) -> Boolean - ): CoroutineDeferred { + ): CoroutineDeferred = with(clientScope) { // make sure the client is listening - startListeningAsync() + with(logger) { startListeningAsync() } - return clientScope.async { - responseFlow.first(timeout) { predicate(it) } - } + return async { responseFlow.first(timeout) { predicate(it) } } } - private fun CoroutineScope.runAsync(block: suspend () -> S): CompletableFuture = + private fun runAsync(block: suspend () -> S): CompletableFuture = CompletableFuture().also { - launch { + clientScope.launch { try { it.complete(block()) } catch (e: CancellationException) { @@ -836,15 +853,11 @@ internal class ClientDispatcher( } } - private fun CoroutineScope.run(block: suspend () -> S): S = try { - runAsync(block).join() - } catch (e: CompletionException) { - throw e.cause ?: e - } - companion object { @TestOnly @JvmStatic private val localLastDeferred: ThreadLocal?> = ThreadLocal() + + internal val logger = InfiniticClient.logger } } diff --git a/infinitic-client/src/test/kotlin/io/infinitic/clients/InfiniticClientTests.kt b/infinitic-client/src/test/kotlin/io/infinitic/clients/InfiniticClientTests.kt index ccb0aeefc..56a967b4e 100644 --- a/infinitic-client/src/test/kotlin/io/infinitic/clients/InfiniticClientTests.kt +++ b/infinitic-client/src/test/kotlin/io/infinitic/clients/InfiniticClientTests.kt @@ -22,6 +22,7 @@ */ package io.infinitic.clients +import io.github.oshai.kotlinlogging.KLogger import io.infinitic.clients.config.InfiniticClientConfig import io.infinitic.clients.deferred.ExistingDeferredWorkflow import io.infinitic.clients.samples.FakeClass @@ -75,7 +76,7 @@ import io.infinitic.common.workflows.engine.messages.RetryTasks import io.infinitic.common.workflows.engine.messages.SendSignal import io.infinitic.common.workflows.engine.messages.WaitWorkflow import io.infinitic.common.workflows.engine.messages.WorkflowEngineEnvelope -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.tags.messages.AddTagToWorkflow import io.infinitic.common.workflows.tags.messages.CancelWorkflowByTag import io.infinitic.common.workflows.tags.messages.DispatchMethodByTag @@ -106,9 +107,10 @@ import java.util.concurrent.CopyOnWriteArrayList private val taskTagSlots = CopyOnWriteArrayList() private val workflowTagSlots = CopyOnWriteArrayList() private val taskSlot = slot() -private val workflowCmdSlots = CopyOnWriteArrayList() +private val workflowCmdSlots = CopyOnWriteArrayList() private val delaySlot = slot() private val scopeSlot = slot() +private val loggerSlot = slot() private val clientNameTest = ClientName("clientTest") private val emitterNameTest = EmitterName("clientTest") @@ -147,20 +149,22 @@ internal val mockedProducer = mockk { getName() } returns "$clientNameTest" coEvery { - internalSendTo(capture(taskTagSlots), ServiceTagEngineTopic) + with(capture(taskTagSlots)) { sendTo(ServiceTagEngineTopic) } } answers { } coEvery { - internalSendTo(capture(workflowTagSlots), WorkflowTagEngineTopic) + with(capture(workflowTagSlots)) { sendTo(WorkflowTagEngineTopic) } } coAnswers { tagResponse() } coEvery { - internalSendTo(capture(workflowCmdSlots), WorkflowStateCmdTopic) + with(capture(workflowCmdSlots)) { sendTo(WorkflowStateCmdTopic) } } coAnswers { engineResponse() } } internal val mockedConsumer = mockk { coEvery { 
with(capture(scopeSlot)) { - startAsync(any>(), "$clientNameTest", 1, any(), any()) + with(capture(loggerSlot)) { + startAsync(any>(), "$clientNameTest", 1, any(), any()) + } } } answers { scopeSlot.captured.launch { delay(Long.MAX_VALUE) } @@ -192,6 +196,7 @@ internal class InfiniticClientTests : StringSpec( val fakeWorkflowWithTags = client.newWorkflow(FakeWorkflow::class.java, tags = tags) beforeTest { + loggerSlot.clear() scopeSlot.clear() delaySlot.clear() taskTagSlots.clear() @@ -227,13 +232,15 @@ internal class InfiniticClientTests : StringSpec( // when asynchronously dispatching a workflow, the consumer should not be started coVerify(exactly = 0) { with(client.clientScope) { - mockedConsumer.start( - MainSubscription(ClientTopic), - "$clientNameTest", - 1, - any(), - any(), - ) + with(InfiniticClient.logger) { + mockedConsumer.startAsync( + MainSubscription(ClientTopic), + "$clientNameTest", + 1, + any(), + any(), + ) + } } } } @@ -427,33 +434,36 @@ internal class InfiniticClientTests : StringSpec( val deferred2 = future2.await() // then workflowCmdSlots.size shouldBe 2 - workflowCmdSlots[0] shouldBe DispatchWorkflow( - workflowName = WorkflowName(FakeWorkflow::class.java.name), - workflowId = WorkflowId(deferred1.id), - methodName = MethodName("m3"), - methodParameters = methodParametersFrom(0, "a"), - methodParameterTypes = - MethodParameterTypes(listOf(Int::class.java.name, String::class.java.name)), - workflowTags = setOf(), - workflowMeta = WorkflowMeta(), - requester = ClientRequester(clientName = ClientName.from(emitterNameTest)), - clientWaiting = false, - emitterName = emitterNameTest, - emittedAt = null, - ) - workflowCmdSlots[1] shouldBe DispatchWorkflow( - workflowName = WorkflowName(FakeWorkflow::class.java.name), - workflowId = WorkflowId(deferred2.id), - methodName = MethodName("m2"), - methodParameters = methodParametersFrom("b"), - methodParameterTypes = - MethodParameterTypes(listOf(String::class.java.name)), - workflowTags = setOf(), - workflowMeta = WorkflowMeta(), - requester = ClientRequester(clientName = ClientName.from(emitterNameTest)), - clientWaiting = false, - emitterName = emitterNameTest, - emittedAt = null, + // we do not know what will be the order + setOf(workflowCmdSlots[0], workflowCmdSlots[1]) shouldBe setOf( + DispatchWorkflow( + workflowName = WorkflowName(FakeWorkflow::class.java.name), + workflowId = WorkflowId(deferred1.id), + methodName = MethodName("m3"), + methodParameters = methodParametersFrom(0, "a"), + methodParameterTypes = + MethodParameterTypes(listOf(Int::class.java.name, String::class.java.name)), + workflowTags = setOf(), + workflowMeta = WorkflowMeta(), + requester = ClientRequester(clientName = ClientName.from(emitterNameTest)), + clientWaiting = false, + emitterName = emitterNameTest, + emittedAt = null, + ), + DispatchWorkflow( + workflowName = WorkflowName(FakeWorkflow::class.java.name), + workflowId = WorkflowId(deferred2.id), + methodName = MethodName("m2"), + methodParameters = methodParametersFrom("b"), + methodParameterTypes = + MethodParameterTypes(listOf(String::class.java.name)), + workflowTags = setOf(), + workflowMeta = WorkflowMeta(), + requester = ClientRequester(clientName = ClientName.from(emitterNameTest)), + clientWaiting = false, + emitterName = emitterNameTest, + emittedAt = null, + ), ) } @@ -511,13 +521,15 @@ internal class InfiniticClientTests : StringSpec( // when waiting for a workflow, the consumer should be started coVerify { with(client.clientScope) { - mockedConsumer.startAsync( - 
MainSubscription(ClientTopic), - "$clientNameTest", - 1, - any(), - any(), - ) + with(InfiniticClient.logger) { + mockedConsumer.startAsync( + MainSubscription(ClientTopic), + "$clientNameTest", + 1, + any(), + any(), + ) + } } } @@ -527,13 +539,15 @@ internal class InfiniticClientTests : StringSpec( // the consumer should be started only once coVerify(exactly = 1) { with(client.clientScope) { - mockedConsumer.startAsync( - MainSubscription(ClientTopic), - "$clientNameTest", - 1, - any(), - any(), - ) + with(InfiniticClient.logger) { + mockedConsumer.startAsync( + MainSubscription(ClientTopic), + "$clientNameTest", + 1, + any(), + any(), + ) + } } } } diff --git a/infinitic-cloudevents/build.gradle.kts b/infinitic-cloudevents/build.gradle.kts index 9eeb55dcf..5fd5cdc2b 100644 --- a/infinitic-cloudevents/build.gradle.kts +++ b/infinitic-cloudevents/build.gradle.kts @@ -22,9 +22,12 @@ */ dependencies { + implementation(project(":infinitic-utils")) implementation(project(":infinitic-common")) implementation(project(":infinitic-transport-inMemory")) + implementation(Libs.Coroutines.core) + implementation(Libs.Serialization.json) implementation(Libs.CloudEvents.json) implementation(Libs.Avro4k.core) diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/CloudEventLogger.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/CloudEventLogger.kt new file mode 100644 index 000000000..ac7455957 --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/CloudEventLogger.kt @@ -0,0 +1,95 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
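// The nested with(...) blocks in the verifications above are the calling convention for
// Kotlin context receivers: startAsync is declared with context(CoroutineScope, KLogger),
// so a scope and a logger must both be brought into context at the call site. A minimal
// sketch of that convention (requires the -Xcontext-receivers compiler flag; the function
// body is illustrative only):
import io.github.oshai.kotlinlogging.KLogger
import io.github.oshai.kotlinlogging.KotlinLogging
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.launch

context(CoroutineScope, KLogger)
fun startSomething(name: String): Job {
    info { "starting $name" }     // KLogger is an implicit receiver here
    return launch { }             // CoroutineScope is an implicit receiver here
}

fun demo(scope: CoroutineScope, logger: KLogger = KotlinLogging.logger {}): Job =
    with(scope) { with(logger) { startSomething("client") } }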
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events + +import io.github.oshai.kotlinlogging.KotlinLogging +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.ClientTopic +import io.infinitic.common.transport.NamingTopic +import io.infinitic.common.transport.ServiceExecutorEventTopic +import io.infinitic.common.transport.ServiceExecutorRetryTopic +import io.infinitic.common.transport.ServiceExecutorTopic +import io.infinitic.common.transport.ServiceTagEngineTopic +import io.infinitic.common.transport.Topic +import io.infinitic.common.transport.WorkflowExecutorEventTopic +import io.infinitic.common.transport.WorkflowExecutorRetryTopic +import io.infinitic.common.transport.WorkflowExecutorTopic +import io.infinitic.common.transport.WorkflowStateCmdTopic +import io.infinitic.common.transport.WorkflowStateEngineTopic +import io.infinitic.common.transport.WorkflowStateEventTopic +import io.infinitic.common.transport.WorkflowStateTimerTopic +import io.infinitic.common.transport.WorkflowTagEngineTopic +import io.infinitic.logger.ignoreNull + +private const val INFINITIC_PREFIX = "io.infinitic" +private const val CLOUD_EVENTS = "$INFINITIC_PREFIX.cloudEvents" + +private const val WORKFLOW_STATE_ENGINE = "WorkflowStateEngine" +private const val WORKFLOW_TAG_ENGINE = "WorkflowTagEngine" +private const val WORKFLOW_EXECUTOR = "WorkflowExecutor" +private const val SERVICE_TAG_ENGINE = "ServiceTagEngine" +private const val SERVICE_EXECUTOR = "ServiceExecutor" + +const val LOGS_WORKFLOW_STATE_ENGINE = "$CLOUD_EVENTS.$WORKFLOW_STATE_ENGINE" +const val LOGS_WORKFLOW_TAG_ENGINE = "$CLOUD_EVENTS.$WORKFLOW_TAG_ENGINE" +const val LOGS_WORKFLOW_EXECUTOR = "$CLOUD_EVENTS.$WORKFLOW_EXECUTOR" +const val LOGS_SERVICE_TAG_ENGINE = "$CLOUD_EVENTS.$SERVICE_TAG_ENGINE" +const val LOGS_SERVICE_EXECUTOR = "$CLOUD_EVENTS.$SERVICE_EXECUTOR" + +class CloudEventLogger( + private val topic: Topic<*>, + entity: String, + private val prefix: String, + private val beautify: Boolean +) { + private val logger = topic.eventLogger(entity)?.ignoreNull() + + fun log(message: Message, publishedAt: MillisInstant) = try { + logger?.debug { + message.toCloudEvent(topic, publishedAt, prefix)?.toJsonString(beautify) + } + } catch (e: Exception) { + logger?.warn(e) { "Error while logging the CloudEvent json of: $message" } + } + + private fun Topic<*>.eventLogger(entity: String) = when (this) { + ClientTopic -> null + NamingTopic -> null + ServiceExecutorEventTopic -> "$LOGS_SERVICE_EXECUTOR.$entity" + ServiceExecutorRetryTopic -> null + ServiceExecutorTopic -> "$LOGS_SERVICE_EXECUTOR.$entity" + ServiceTagEngineTopic -> "$LOGS_SERVICE_TAG_ENGINE.$entity" + WorkflowExecutorEventTopic -> "$LOGS_WORKFLOW_EXECUTOR.$entity" + WorkflowExecutorRetryTopic -> null + WorkflowExecutorTopic -> "$LOGS_WORKFLOW_EXECUTOR.$entity" + WorkflowStateCmdTopic -> "$LOGS_WORKFLOW_STATE_ENGINE.$entity" + WorkflowStateEngineTopic -> "$LOGS_WORKFLOW_STATE_ENGINE.$entity" + WorkflowStateEventTopic -> "$LOGS_WORKFLOW_STATE_ENGINE.$entity" + WorkflowStateTimerTopic -> null + WorkflowTagEngineTopic -> "$LOGS_WORKFLOW_TAG_ENGINE.$entity" + }?.let { + KotlinLogging.logger(it) + } +} + diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/EventListener.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/EventListener.kt deleted file mode 100644 index 
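// A usage sketch for the CloudEventLogger introduced above: one instance per (topic, entity)
// pair, logging the CloudEvent JSON at DEBUG level under a per-component logger name
// (io.infinitic.cloudEvents.<Component>.<entity>), so log backends can route or silence
// events per service or workflow. The prefix value and the commented call are illustrative.
import io.infinitic.common.transport.WorkflowStateEngineTopic
import io.infinitic.events.CloudEventLogger

val workflowEventLogger = CloudEventLogger(
    topic = WorkflowStateEngineTopic,
    entity = "MyWorkflow",
    prefix = "io.infinitic.workflows",   // assumed CloudEvent source prefix
    beautify = true,
)
// inside a consumer handler: workflowEventLogger.log(message, publishedAt)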
c6511a5a4..000000000 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/EventListener.kt +++ /dev/null @@ -1,34 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. - * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. - * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ - -package io.infinitic.events - -import io.github.oshai.kotlinlogging.KotlinLogging -import io.infinitic.cloudEvents.CloudEventListener - -class EventListener(val listener: CloudEventListener, val source: String) { - - companion object { - val logger = KotlinLogging.logger {} - } -} diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/EventListenerConfig.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/config/EventListenerConfig.kt similarity index 69% rename from infinitic-worker/src/main/kotlin/io/infinitic/workers/config/EventListenerConfig.kt rename to infinitic-cloudevents/src/main/kotlin/io/infinitic/events/config/EventListenerConfig.kt index 399a3edc2..3a470f1c5 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/EventListenerConfig.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/config/EventListenerConfig.kt @@ -20,10 +20,10 @@ * * Licensor: infinitic.io */ -package io.infinitic.workers.config +package io.infinitic.events.config import io.infinitic.cloudEvents.CloudEventListener -import io.infinitic.cloudEvents.SelectionConfig +import io.infinitic.cloudEvents.EntityListConfig import io.infinitic.common.utils.annotatedName import io.infinitic.common.utils.getInstance import io.infinitic.config.loadFromYamlFile @@ -34,18 +34,18 @@ sealed class EventListenerConfig { abstract val listener: CloudEventListener abstract val concurrency: Int abstract val subscriptionName: String? - abstract val allowedServices: List? - abstract val disallowedServices: List - abstract val allowedWorkflows: List? 
- abstract val disallowedWorkflows: List - abstract val refreshDelaySeconds: Double + abstract val batchConfig: EventListenerBatchConfig + abstract val serviceListConfig: EntityListConfig + abstract val workflowListConfig: EntityListConfig fun includeService(service: String): Boolean { - return !disallowedServices.contains(service) && (allowedServices?.contains(service) != false) + return !serviceListConfig.disallow.contains(service) && + (serviceListConfig.allow?.contains(service) != false) } fun includeWorkflow(workflow: String): Boolean { - return !disallowedWorkflows.contains(workflow) && (allowedWorkflows?.contains(workflow) != false) + return !workflowListConfig.disallow.contains(workflow) && + (workflowListConfig.allow?.contains(workflow) != false) } companion object { @@ -70,8 +70,8 @@ sealed class EventListenerConfig { * Create EventListenerConfig from yaml strings */ @JvmStatic - fun fromYamlString(vararg yamls: String): EventListenerConfig = - loadFromYamlString(*yamls) + fun fromYamlString(vararg yaml: String): EventListenerConfig = + loadFromYamlString(*yaml) } /** @@ -82,10 +82,12 @@ sealed class EventListenerConfig { private var concurrency: Int = 1 private var subscriptionName: String? = null private var allowedServices: MutableList? = null - private val disallowedServices: MutableList = mutableListOf() private var allowedWorkflows: MutableList? = null + private val disallowedServices: MutableList = mutableListOf() private val disallowedWorkflows: MutableList = mutableListOf() - private var refreshDelaySeconds: Double = 60.0 + private var serviceListRefreshSeconds: Double = 60.0 + private var workflowListRefreshSeconds: Double = 60.0 + private var batchConfig = EventListenerBatchConfig() fun setListener(cloudEventListener: CloudEventListener) = apply { this.listener = cloudEventListener } @@ -128,8 +130,14 @@ sealed class EventListenerConfig { fun allowWorkflows(vararg workflows: Class<*>) = apply { allowWorkflows(*(workflows.map { it.annotatedName }.toTypedArray())) } - fun setRefreshDelaySeconds(refreshDelaySeconds: Double) = - apply { this.refreshDelaySeconds = refreshDelaySeconds } + fun setServiceListRefreshSeconds(listRefreshSeconds: Double) = + apply { this.serviceListRefreshSeconds = listRefreshSeconds } + + fun setWorkflowListRefreshSeconds(listRefreshSeconds: Double) = + apply { this.workflowListRefreshSeconds = listRefreshSeconds } + + fun setBatch(maxEvents: Int, maxSeconds: Double) = + apply { this.batchConfig = EventListenerBatchConfig(maxEvents, maxSeconds) } fun build(): EventListenerConfig { require(listener != null) { "${EventListenerConfig::listener.name} must not be null" } @@ -138,11 +146,17 @@ sealed class EventListenerConfig { listener!!, concurrency, subscriptionName, - refreshDelaySeconds, - allowedServices, - disallowedServices, - allowedWorkflows, - disallowedWorkflows, + batchConfig, + EntityListConfig( + listRefreshSeconds = serviceListRefreshSeconds, + allow = allowedServices, + disallow = disallowedServices, + ), + EntityListConfig( + listRefreshSeconds = workflowListRefreshSeconds, + allow = allowedWorkflows, + disallow = disallowedWorkflows, + ), ) } } @@ -155,11 +169,9 @@ data class BuiltEventListenerConfig( override val listener: CloudEventListener, override val concurrency: Int, override val subscriptionName: String?, - override val refreshDelaySeconds: Double, - override val allowedServices: MutableList?, - override val disallowedServices: MutableList, - override val allowedWorkflows: MutableList?, - override val disallowedWorkflows: 
MutableList, + override val batchConfig: EventListenerBatchConfig, + override val serviceListConfig: EntityListConfig, + override val workflowListConfig: EntityListConfig, ) : EventListenerConfig() /** @@ -169,16 +181,15 @@ data class LoadedEventListenerConfig( val `class`: String, override val concurrency: Int = 1, override val subscriptionName: String? = null, - override val refreshDelaySeconds: Double = 60.0, - val services: SelectionConfig = SelectionConfig(), - val workflows: SelectionConfig = SelectionConfig() + val batch: EventListenerBatchConfig = EventListenerBatchConfig(), + val services: EntityListConfig = EntityListConfig(), + val workflows: EntityListConfig = EntityListConfig() ) : EventListenerConfig() { override val listener: CloudEventListener - override var allowedServices = services.allow - override val disallowedServices = services.disallow - override var allowedWorkflows = workflows.allow - override val disallowedWorkflows = workflows.disallow + override val batchConfig = batch + override val serviceListConfig = services + override val workflowListConfig = workflows init { with(`class`) { @@ -192,12 +203,22 @@ data class LoadedEventListenerConfig( require(concurrency > 0) { error("'${::concurrency.name}' must be > 0, but was $concurrency") } - require(refreshDelaySeconds >= 0) { error("'${::refreshDelaySeconds.name}' must be >= 0, but was $refreshDelaySeconds") } - subscriptionName?.let { require(it.isNotEmpty()) { error("'when provided, ${::subscriptionName.name}' must not be empty") } } } +} + +private fun error(txt: String) = "eventListener: $txt" + +data class EventListenerBatchConfig( + val maxEvents: Int = 1000, + val maxSeconds: Double = 1.0 +) { + init { + require(maxEvents > 0) { error("'${::maxEvents.name}' must be > 0, but was $maxEvents") } + require(maxSeconds > 0) { error("'${::maxSeconds.name}' must be > 0, but was $maxSeconds") } + } - private fun error(txt: String) = "eventListener: $txt" + val maxMillis = (maxSeconds * 1000).toLong() } diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/services/serviceTypes.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/services/serviceTypes.kt index b2e71fe56..a42425d9a 100644 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/services/serviceTypes.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/services/serviceTypes.kt @@ -32,9 +32,9 @@ import io.infinitic.common.tasks.executors.messages.ExecuteTask import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage import io.infinitic.events.types.COMPLETED import io.infinitic.events.types.DELEGATION_COMPLETED +import io.infinitic.events.types.DISPATCH import io.infinitic.events.types.FAILED import io.infinitic.events.types.RETRY_SCHEDULED -import io.infinitic.events.types.START import io.infinitic.events.types.STARTED import io.infinitic.events.types.TYPE_TASK @@ -50,6 +50,6 @@ fun ServiceExecutorEventMessage.serviceType(): String = "$TYPE_TASK." + when (th } fun ServiceExecutorMessage.serviceType(): String = "$TYPE_TASK." 
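// EntityListConfig itself is not part of this excerpt; the sketch below is a hypothetical
// shape consistent with how it is constructed and read in this diff (an optional allow list,
// a disallow list, a refresh delay in seconds, and a derived millisecond value used by the
// refresh loops), together with the inclusion rule used by includeService/includeWorkflow:
// an entity is included when it is not disallowed and either no allow list is set or the
// allow list contains it.
data class EntityListConfigSketch(
    val listRefreshSeconds: Double = 60.0,
    val allow: List<String>? = null,          // null means "everything not disallowed"
    val disallow: List<String> = emptyList(),
) {
    val listRefreshMillis = (listRefreshSeconds * 1000).toLong()

    fun includes(entity: String): Boolean =
        entity !in disallow && (allow?.contains(entity) != false)
}
// EntityListConfigSketch(allow = listOf("MyService")).includes("OtherService") // false
// EntityListConfigSketch(disallow = listOf("Internal")).includes("Internal")   // false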
+ when (this) { - is ExecuteTask -> START + is ExecuteTask -> DISPATCH } diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowData.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowData.kt index 9ca124088..42e8d38fa 100644 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowData.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowData.kt @@ -77,9 +77,9 @@ import io.infinitic.common.workflows.engine.messages.TimerDispatchedEvent import io.infinitic.common.workflows.engine.messages.WaitWorkflow import io.infinitic.common.workflows.engine.messages.WorkflowCanceledEvent import io.infinitic.common.workflows.engine.messages.WorkflowCompletedEvent -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.events.errors.toJson import io.infinitic.events.types.REMOTE_METHOD_DISPATCHED import io.infinitic.events.types.SIGNAL_DISCARDED @@ -88,7 +88,7 @@ import io.infinitic.events.types.SIGNAL_RECEIVED import io.infinitic.events.types.TIMER_DISPATCHED import kotlinx.serialization.json.JsonObject -fun WorkflowStateEngineCmdMessage.toJson(): JsonObject = when (this) { +fun WorkflowStateCmdMessage.toJson(): JsonObject = when (this) { is DispatchWorkflow -> JsonObject( mapOf( workflowSimpleType()!! to JsonObject( @@ -186,7 +186,7 @@ fun WorkflowStateEngineCmdMessage.toJson(): JsonObject = when (this) { fun WorkflowStateEngineMessage.toJson(): JsonObject = when (this) { - is WorkflowStateEngineCmdMessage -> thisShouldNotHappen() + is WorkflowStateCmdMessage -> thisShouldNotHappen() is RemoteMethodCompleted -> JsonObject( mapOf( @@ -313,7 +313,7 @@ fun WorkflowStateEngineMessage.toJson(): JsonObject = when (this) { ) } -fun WorkflowStateEngineEventMessage.toJson(): JsonObject = when (this) { +fun WorkflowStateEventMessage.toJson(): JsonObject = when (this) { is WorkflowCompletedEvent -> JsonObject( mapOf( diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowTypes.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowTypes.kt index eec1865cb..c620e2092 100644 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowTypes.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/data/workflows/workflowTypes.kt @@ -56,13 +56,15 @@ import io.infinitic.common.workflows.engine.messages.TimerDispatchedEvent import io.infinitic.common.workflows.engine.messages.WaitWorkflow import io.infinitic.common.workflows.engine.messages.WorkflowCanceledEvent import io.infinitic.common.workflows.engine.messages.WorkflowCompletedEvent -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.events.types.CANCEL import 
io.infinitic.events.types.CANCELED import io.infinitic.events.types.CANCEL_METHOD -import io.infinitic.events.types.ENDED +import io.infinitic.events.types.COMPLETED +import io.infinitic.events.types.DISPATCH +import io.infinitic.events.types.DISPATCH_METHOD import io.infinitic.events.types.EXECUTOR_COMPLETED import io.infinitic.events.types.EXECUTOR_DISPATCHED import io.infinitic.events.types.EXECUTOR_FAILED @@ -82,8 +84,6 @@ import io.infinitic.events.types.SIGNAL import io.infinitic.events.types.SIGNAL_DISCARDED import io.infinitic.events.types.SIGNAL_DISPATCHED import io.infinitic.events.types.SIGNAL_RECEIVED -import io.infinitic.events.types.START -import io.infinitic.events.types.START_METHOD import io.infinitic.events.types.TASK_COMPLETED import io.infinitic.events.types.TASK_DISPATCHED import io.infinitic.events.types.TASK_FAILED @@ -92,9 +92,9 @@ import io.infinitic.events.types.TIMER_COMPLETED import io.infinitic.events.types.TIMER_DISPATCHED import io.infinitic.events.types.TYPE_WORKFLOW -internal fun WorkflowStateEngineCmdMessage.workflowSimpleType(): String? = when (this) { - is DispatchWorkflow -> START - is DispatchMethod -> START_METHOD +internal fun WorkflowStateCmdMessage.workflowSimpleType(): String? = when (this) { + is DispatchWorkflow -> DISPATCH + is DispatchMethod -> DISPATCH_METHOD is CancelWorkflow -> when (workflowMethodId) { null -> CANCEL else -> CANCEL_METHOD @@ -109,7 +109,7 @@ internal fun WorkflowStateEngineCmdMessage.workflowSimpleType(): String? = when } internal fun WorkflowStateEngineMessage.workflowSimpleType(): String? = when (this) { - is WorkflowStateEngineCmdMessage -> null + is WorkflowStateCmdMessage -> null is RemoteTimerCompleted -> TIMER_COMPLETED is RemoteMethodCompleted -> REMOTE_METHOD_COMPLETED is RemoteMethodCanceled -> REMOTE_METHOD_CANCELED @@ -133,10 +133,10 @@ internal fun WorkflowStateEngineMessage.workflowSimpleType(): String? = when (th } } -internal fun WorkflowStateEngineEventMessage.workflowSimpleType(): String = when (this) { - is WorkflowCompletedEvent -> ENDED +internal fun WorkflowStateEventMessage.workflowSimpleType(): String = when (this) { + is WorkflowCompletedEvent -> COMPLETED is WorkflowCanceledEvent -> CANCELED - is MethodCommandedEvent -> START_METHOD + is MethodCommandedEvent -> DISPATCH_METHOD is MethodCompletedEvent -> METHOD_COMPLETED is MethodFailedEvent -> METHOD_FAILED is MethodCanceledEvent -> METHOD_CANCELED @@ -154,11 +154,11 @@ internal fun WorkflowStateEngineEventMessage.workflowSimpleType(): String = when } -fun WorkflowStateEngineCmdMessage.workflowType(): String? = +fun WorkflowStateCmdMessage.workflowType(): String? = this.workflowSimpleType()?.let { "$TYPE_WORKFLOW.$it" } fun WorkflowStateEngineMessage.workflowType(): String? = this.workflowSimpleType()?.let { "$TYPE_WORKFLOW.$it" } -fun WorkflowStateEngineEventMessage.workflowType(): String = +fun WorkflowStateEventMessage.workflowType(): String = "$TYPE_WORKFLOW." 
+ workflowSimpleType() diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToServiceExecutorTopics.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToServiceExecutorTopics.kt new file mode 100644 index 000000000..089656b20 --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToServiceExecutorTopics.kt @@ -0,0 +1,72 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.messages.Message +import io.infinitic.common.tasks.data.ServiceName +import io.infinitic.common.transport.InfiniticConsumer +import io.infinitic.common.transport.ServiceExecutorEventTopic +import io.infinitic.common.transport.ServiceExecutorRetryTopic +import io.infinitic.common.transport.ServiceExecutorTopic +import io.infinitic.common.transport.SubscriptionType +import io.infinitic.common.transport.TransportMessage +import io.infinitic.common.transport.consumers.Result +import io.infinitic.common.transport.consumers.startConsuming +import io.infinitic.common.transport.create +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +internal fun InfiniticConsumer.listenToServiceExecutorTopics( + serviceName: ServiceName, + subscriptionName: String?, + outChannel: Channel, TransportMessage>>, +): Job = launch { + + // Send messages from ServiceExecutorTopic to inChannel + val serviceExecutorSubscription = SubscriptionType.EVENT_LISTENER.create( + ServiceExecutorTopic, + subscriptionName, + ) + buildConsumer(serviceExecutorSubscription, serviceName.toString()) + .startConsuming(outChannel) + + // Send messages from ServiceExecutorEventTopic to inChannel + val serviceExecutorEventSubscription = SubscriptionType.EVENT_LISTENER.create( + ServiceExecutorEventTopic, + subscriptionName, + ) + buildConsumer(serviceExecutorEventSubscription, serviceName.toString()) + .startConsuming(outChannel) + + // Send messages from ServiceExecutorRetryTopic to inChannel + val serviceExecutorRetrySubscription = SubscriptionType.EVENT_LISTENER.create( + ServiceExecutorRetryTopic, + subscriptionName, + ) + buildConsumer(serviceExecutorRetrySubscription, serviceName.toString()) + .startConsuming(outChannel) +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowExecutorTopics.kt 
b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowExecutorTopics.kt new file mode 100644 index 000000000..bbd969114 --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowExecutorTopics.kt @@ -0,0 +1,72 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.InfiniticConsumer +import io.infinitic.common.transport.SubscriptionType +import io.infinitic.common.transport.TransportMessage +import io.infinitic.common.transport.WorkflowExecutorEventTopic +import io.infinitic.common.transport.WorkflowExecutorRetryTopic +import io.infinitic.common.transport.WorkflowExecutorTopic +import io.infinitic.common.transport.consumers.Result +import io.infinitic.common.transport.consumers.startConsuming +import io.infinitic.common.transport.create +import io.infinitic.common.workflows.data.workflows.WorkflowName +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +internal fun InfiniticConsumer.listenToWorkflowExecutorTopics( + workflowName: WorkflowName, + subscriptionName: String?, + outChannel: Channel, TransportMessage>>, +): Job = launch { + + // Send messages from WorkflowExecutorTopic to inChannel + val workflowExecutorSubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowExecutorTopic, + subscriptionName, + ) + buildConsumer(workflowExecutorSubscription, workflowName.toString()) + .startConsuming(outChannel) + + // Send messages from WorkflowExecutorEventTopic to inChannel + val workflowExecutorEventSubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowExecutorEventTopic, + subscriptionName, + ) + buildConsumer(workflowExecutorEventSubscription, workflowName.toString()) + .startConsuming(outChannel) + + // Send messages from WorkflowExecutorRetryTopic to inChannel + val workflowExecutorRetrySubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowExecutorRetryTopic, + subscriptionName, + ) + buildConsumer(workflowExecutorRetrySubscription, workflowName.toString()) + .startConsuming(outChannel) +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowStateTopics.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowStateTopics.kt new file mode 
100644 index 000000000..7980eacff --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/listenToWorkflowStateTopics.kt @@ -0,0 +1,72 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.InfiniticConsumer +import io.infinitic.common.transport.SubscriptionType +import io.infinitic.common.transport.TransportMessage +import io.infinitic.common.transport.WorkflowStateCmdTopic +import io.infinitic.common.transport.WorkflowStateEngineTopic +import io.infinitic.common.transport.WorkflowStateEventTopic +import io.infinitic.common.transport.consumers.Result +import io.infinitic.common.transport.consumers.startConsuming +import io.infinitic.common.transport.create +import io.infinitic.common.workflows.data.workflows.WorkflowName +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +internal fun InfiniticConsumer.listenToWorkflowStateTopics( + workflowName: WorkflowName, + subscriptionName: String?, + outChannel: Channel, TransportMessage>>, +): Job = launch { + + // Send messages from WorkflowStateCmdTopic to inChannel + val workflowStateCmdSubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowStateCmdTopic, + subscriptionName, + ) + buildConsumer(workflowStateCmdSubscription, workflowName.toString()) + .startConsuming(outChannel) + + // Send messages from WorkflowStateEngineTopic to inChannel + val workflowStateEngineSubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowStateEngineTopic, + subscriptionName, + ) + buildConsumer(workflowStateEngineSubscription, workflowName.toString()) + .startConsuming(outChannel) + + // Send messages from WorkflowStateEventTopic to inChannel + val workflowStateEventSubscription = SubscriptionType.EVENT_LISTENER.create( + WorkflowStateEventTopic, + subscriptionName, + ) + buildConsumer(workflowStateEventSubscription, workflowName.toString()) + .startConsuming(outChannel) +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshServiceListAsync.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshServiceListAsync.kt new file mode 100644 index 000000000..b8cfd2cc7 --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshServiceListAsync.kt @@ -0,0 
+1,58 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.tasks.data.ServiceName +import io.infinitic.common.transport.InfiniticResources +import io.infinitic.events.config.EventListenerConfig +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +internal fun InfiniticResources.refreshServiceListAsync( + config: EventListenerConfig, + start: suspend (ServiceName) -> Unit +) = launch { + val processedServices = mutableSetOf() + + while (true) { + // Retrieve the list of services + getServices().onSuccess { services -> + val currentServices = services.filter { config.includeService(it) } + + // Determine new services that haven't been processed + val newServices = currentServices.filterNot { it in processedServices } + + // Launch starter for each new service + for (service in newServices) { + start(ServiceName(service)) + // Add the service to the set of processed services + processedServices.add(service) + } + } + + delay(config.serviceListConfig.listRefreshMillis) + } +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshWorkflowListAsync.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshWorkflowListAsync.kt new file mode 100644 index 000000000..33863feab --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/refreshWorkflowListAsync.kt @@ -0,0 +1,58 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.transport.InfiniticResources +import io.infinitic.common.workflows.data.workflows.WorkflowName +import io.infinitic.events.config.EventListenerConfig +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.delay +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +internal fun InfiniticResources.refreshWorkflowListAsync( + config: EventListenerConfig, + start: suspend (WorkflowName) -> Unit +) = launch { + val processedWorkflows = mutableSetOf() + + while (true) { + // Retrieve the list of workflows + getWorkflows().onSuccess { workflows -> + val currentWorkflows = workflows.filter { config.includeWorkflow(it) } + + // Determine new workflows that haven't been processed + val newWorkflows = currentWorkflows.filterNot { it in processedWorkflows } + + // Launch starter for each new workflow + for (workflow in newWorkflows) { + start(WorkflowName(workflow)) + // Add the workflow to the set of processed workflows + processedWorkflows.add(workflow) + } + } + + delay(config.workflowListConfig.listRefreshMillis) + } +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/startCloudEventListener.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/startCloudEventListener.kt new file mode 100644 index 000000000..774f5a343 --- /dev/null +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/listeners/startCloudEventListener.kt @@ -0,0 +1,101 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
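Note: the two refresh helpers above (refreshServiceListAsync and refreshWorkflowListAsync) share the same poll-diff-start loop. A minimal, self-contained sketch of that pattern; names such as refreshListAsync, fetch and include are illustrative and not part of the Infinitic API:

    import kotlinx.coroutines.CoroutineScope
    import kotlinx.coroutines.Job
    import kotlinx.coroutines.delay
    import kotlinx.coroutines.launch

    fun CoroutineScope.refreshListAsync(
        refreshMillis: Long,
        fetch: suspend () -> List<String>,   // e.g. resources.getServices() or getWorkflows()
        include: (String) -> Boolean,        // e.g. config.includeService / includeWorkflow
        start: suspend (String) -> Unit,     // launches consumers for a newly discovered entity
    ): Job = launch {
        val processed = mutableSetOf<String>()
        while (true) {
            // only entities never seen before are started; known ones are skipped
            fetch().filter(include).filterNot { it in processed }.forEach {
                start(it)
                processed += it
            }
            delay(refreshMillis)
        }
    }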
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.events.listeners + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.data.MillisDuration +import io.infinitic.common.exceptions.thisShouldNotHappen +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.BatchConfig +import io.infinitic.common.transport.InfiniticConsumer +import io.infinitic.common.transport.InfiniticResources +import io.infinitic.common.transport.TransportMessage +import io.infinitic.common.transport.consumers.Result +import io.infinitic.common.transport.consumers.acknowledge +import io.infinitic.common.transport.consumers.batchBy +import io.infinitic.common.transport.consumers.batchProcess +import io.infinitic.common.transport.consumers.process +import io.infinitic.events.config.EventListenerConfig +import io.infinitic.events.toCloudEvent +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.launch + +context(CoroutineScope, KLogger) +fun InfiniticConsumer.startCloudEventListener( + resources: InfiniticResources, + config: EventListenerConfig, + cloudEventSourcePrefix: String, +): Job = launch { + + // Channels where all messages consumed from topics are sent + val outChannel = Channel, TransportMessage>>() + + // all messages will have this batch config + val batchConfig = BatchConfig( + batchKey = "cloudEvent", // same for all + maxMessages = config.batchConfig.maxEvents, + maxDuration = MillisDuration(config.batchConfig.maxMillis), + ) + + // Launch the processing of outChannel + launch { + outChannel + .process(config.concurrency) { _, message -> message.deserialize() } + .batchBy { batchConfig } + .batchProcess( + config.concurrency, + { _, _ -> thisShouldNotHappen() }, + { transportMessages, messages -> + val cloudEvents = messages.zip(transportMessages) { message, transportMessage -> + message.toCloudEvent( + transportMessage.topic, + transportMessage.publishTime, + cloudEventSourcePrefix, + ) + }.filterNotNull() + if (cloudEvents.isNotEmpty()) { + config.listener.onEvents(cloudEvents) + } + }, + ) + .acknowledge() + } + + // Listen service topics, for each service found + resources.refreshServiceListAsync(config) { serviceName -> + info { "EventListener starts listening Service $serviceName" } + + listenToServiceExecutorTopics(serviceName, config.subscriptionName, outChannel) + } + + // Listen workflow topics, for each workflow found + resources.refreshWorkflowListAsync(config) { workflowName -> + info { "EventListener starts listening Workflow $workflowName" } + + listenToWorkflowExecutorTopics(workflowName, config.subscriptionName, outChannel) + + listenToWorkflowStateTopics(workflowName, config.subscriptionName, outChannel) + } +} diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/toCloudEvent.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/toCloudEvent.kt index 8754f71aa..71262f951 100644 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/toCloudEvent.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/toCloudEvent.kt @@ -32,142 +32,87 @@ import io.infinitic.common.messages.Message import io.infinitic.common.requester.workflowName import io.infinitic.common.tasks.events.messages.ServiceExecutorEventMessage import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage -import 
io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.transport.Topic +import io.infinitic.common.transport.WorkflowStateEngineTopic +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.events.data.services.serviceType import io.infinitic.events.data.services.toJson import io.infinitic.events.data.workflows.toJson import io.infinitic.events.data.workflows.workflowType import java.net.URI import java.net.URLEncoder -import java.time.OffsetDateTime -fun Message.toServiceCloudEvent(publishedAt: MillisInstant, prefix: String): CloudEvent? = - with(CloudEventContext.SERVICE) { - when (val type = type()) { - null -> null - else -> CloudEventBuilder() - .withId(messageId.toString()) - .withTime(time(publishedAt)) - .withType(type) - .withSubject(subject()) - .withSource(source(prefix)) - .withDataContentType("application/json") - .withoutDataSchema() - .withData(dataBytes()) - .build() - } - } - -fun Message.toWorkflowCloudEvent(publishedAt: MillisInstant, sourcePrefix: String): CloudEvent? = - with(CloudEventContext.WORKFLOW) { - when (val type = type()) { - null -> null - else -> CloudEventBuilder() - .withId(messageId.toString()) - .withTime(time(publishedAt)) - .withType(type) - .withSubject(subject()) - .withSource(source(sourcePrefix)) - .withDataContentType("application/json") - .withoutDataSchema() - .withData(dataBytes()) - .build() - } - } - -enum class CloudEventContext { - - WORKFLOW { - - override fun Message.type(): String? = when (this) { - is WorkflowStateEngineCmdMessage -> workflowType() - is WorkflowStateEngineMessage -> workflowType() - is WorkflowStateEngineEventMessage -> workflowType() - else -> null - } - - override fun Message.subject(): String = when (this) { - is WorkflowStateEngineCmdMessage -> workflowId - is WorkflowStateEngineMessage -> workflowId - is WorkflowStateEngineEventMessage -> workflowId - else -> thisShouldNotHappen() - }.toString() - - override fun Message.source(prefix: String): URI = when (this) { - is WorkflowStateEngineCmdMessage -> workflowName - is WorkflowStateEngineMessage -> workflowName - is WorkflowStateEngineEventMessage -> workflowName - else -> thisShouldNotHappen() - }.let { - URI.create("$prefix/workflows/stateEngine/${it.encoded}") - } - - override fun Message.dataBytes(): ByteArray = when (this) { - is WorkflowStateEngineCmdMessage -> toJson() - is WorkflowStateEngineMessage -> toJson() - is WorkflowStateEngineEventMessage -> toJson() - else -> thisShouldNotHappen() - }.toString().toByteArray() - - override fun Message.time(publishedAt: MillisInstant): OffsetDateTime = when (this) { - is WorkflowStateEngineCmdMessage -> publishedAt - is WorkflowStateEngineMessage -> emittedAt ?: publishedAt - is WorkflowStateEngineEventMessage -> publishedAt - else -> thisShouldNotHappen() - }.toOffsetDateTime() - }, - - SERVICE { - - override fun Message.type(): String? 
= when (this) { - is ServiceExecutorMessage -> serviceType() - is ServiceExecutorEventMessage -> serviceType() - else -> null - } - - override fun Message.subject(): String = when (this) { - is ServiceExecutorMessage -> taskId - is ServiceExecutorEventMessage -> taskId - else -> thisShouldNotHappen() - }.toString() - - override fun Message.source(prefix: String): URI = when (this) { - is ServiceExecutorMessage -> when (isWorkflowTask()) { - true -> "workflows/executor/${requester.workflowName.encoded}" - false -> "services/executor/${serviceName.encoded}" - } - - is ServiceExecutorEventMessage -> when (isWorkflowTask()) { - true -> "workflows/executor/${requester.workflowName.encoded}" - false -> "services/executor/${serviceName.encoded}" - } - - else -> thisShouldNotHappen() - }.let { - URI.create("$prefix/$it") - } +private fun S.isRedundantIn(topic: Topic<*>) = when (topic) { + is WorkflowStateEngineTopic -> this is WorkflowStateCmdMessage + else -> false +} - override fun Message.dataBytes(): ByteArray = when (this) { - is ServiceExecutorMessage -> toJson() - is ServiceExecutorEventMessage -> toJson() - else -> thisShouldNotHappen() - }.toString().toByteArray() +fun S.toCloudEvent( + topic: Topic<*>, + publishedAt: MillisInstant, + prefix: String +): CloudEvent? = if (isRedundantIn(topic)) null else + when (val type = type()) { + null -> null + else -> CloudEventBuilder() + .withId(messageId.toString()) + .withTime(publishedAt.toOffsetDateTime()) + .withType(type) + .withSubject(subject()) + .withSource(source(prefix)) + .withDataContentType("application/json") + .withoutDataSchema() + .withData(dataBytes()) + .build() + } - override fun Message.time(publishedAt: MillisInstant): OffsetDateTime = - publishedAt.toOffsetDateTime() - }; +private fun Message.type(): String? = when (this) { + is WorkflowStateCmdMessage -> workflowType() + is WorkflowStateEngineMessage -> workflowType() + is WorkflowStateEventMessage -> workflowType() + is ServiceExecutorMessage -> serviceType() + is ServiceExecutorEventMessage -> serviceType() + else -> null +} - companion object { - private val Name?.encoded - get() = URLEncoder.encode(toString(), Charsets.UTF_8) +private fun Message.subject(): String = when (this) { + is WorkflowStateCmdMessage -> workflowId + is WorkflowStateEngineMessage -> workflowId + is WorkflowStateEventMessage -> workflowId + is ServiceExecutorMessage -> taskId + is ServiceExecutorEventMessage -> taskId + else -> thisShouldNotHappen() +}.toString() + +private fun Message.source(prefix: String): URI = when (this) { + is WorkflowStateCmdMessage -> "workflows/stateEngine/${workflowName.encoded}" + is WorkflowStateEngineMessage -> "workflows/stateEngine/${workflowName.encoded}" + is WorkflowStateEventMessage -> "workflows/stateEngine/${workflowName.encoded}" + is ServiceExecutorMessage -> when (isWorkflowTask()) { + true -> "workflows/executor/${requester.workflowName.encoded}" + false -> "services/executor/${serviceName.encoded}" } + is ServiceExecutorEventMessage -> when (isWorkflowTask()) { + true -> "workflows/executor/${requester.workflowName.encoded}" + false -> "services/executor/${serviceName.encoded}" + } - abstract fun Message.time(publishedAt: MillisInstant): OffsetDateTime - abstract fun Message.type(): String? 
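Note: for reference, a hedged sketch of the envelope that the new toCloudEvent() above assembles, using the CloudEvents Java SDK builder directly; all field values below are illustrative placeholders:

    import io.cloudevents.core.builder.CloudEventBuilder
    import java.net.URI
    import java.time.OffsetDateTime

    fun main() {
        val event = CloudEventBuilder.v1()
            .withId("a7e1b2c3-0000-0000-0000-000000000000")       // message.messageId
            .withTime(OffsetDateTime.now())                       // publishedAt.toOffsetDateTime()
            .withType("infinitic.workflow.dispatch")              // Message.type()
            .withSubject("b8f2c3d4-0000-0000-0000-000000000000")  // Message.subject(), e.g. workflowId
            .withSource(URI.create("inMemory/workflows/stateEngine/WorkflowA")) // Message.source(prefix)
            .withDataContentType("application/json")
            .withData("""{"workflowName":"WorkflowA"}""".toByteArray())         // Message.dataBytes()
            .build()
        println(event)
    }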
- abstract fun Message.subject(): String - abstract fun Message.source(prefix: String): URI - abstract fun Message.dataBytes(): ByteArray + else -> thisShouldNotHappen() +}.let { + URI.create("$prefix/$it") } + +private fun Message.dataBytes(): ByteArray = when (this) { + is WorkflowStateCmdMessage -> toJson() + is WorkflowStateEngineMessage -> toJson() + is WorkflowStateEventMessage -> toJson() + is ServiceExecutorMessage -> toJson() + is ServiceExecutorEventMessage -> toJson() + else -> thisShouldNotHappen() +}.toString().toByteArray() + +internal val Name?.encoded: String + get() = URLEncoder.encode(toString(), Charsets.UTF_8) diff --git a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/types/types.kt b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/types/types.kt index aab198bdf..d66ebc94a 100644 --- a/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/types/types.kt +++ b/infinitic-cloudevents/src/main/kotlin/io/infinitic/events/types/types.kt @@ -27,9 +27,8 @@ private const val TYPE_DOMAIN = "infinitic" const val TYPE_TASK = "$TYPE_DOMAIN.task" const val TYPE_WORKFLOW = "$TYPE_DOMAIN.workflow" -const val START = "start" +const val DISPATCH = "dispatch" const val CANCEL = "cancel" -const val ENDED = "ended" const val STARTED = "started" const val FAILED = "failed" @@ -52,7 +51,7 @@ const val EXECUTOR_COMPLETED = "executorCompleted" const val EXECUTOR_FAILED = "executorFailed" // events related to workflow methods -const val START_METHOD = "startMethod" +const val DISPATCH_METHOD = "dispatchMethod" const val CANCEL_METHOD = "cancelMethod" const val METHOD_CANCELED = "methodCanceled" const val METHOD_COMPLETED = "methodCompleted" diff --git a/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/CloudEventTests.kt b/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/CloudEventTests.kt index 0f727a6e8..2db3cf1c9 100644 --- a/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/CloudEventTests.kt +++ b/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/CloudEventTests.kt @@ -23,15 +23,16 @@ package io.infinitic.events.messages -import com.fasterxml.jackson.module.kotlin.jsonMapper import io.cloudevents.CloudEvent -import io.cloudevents.jackson.JsonFormat import io.infinitic.cloudEvents.CloudEventListener import io.infinitic.common.clients.data.ClientName +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.data.methods.MethodArgs import io.infinitic.common.exceptions.thisShouldNotHappen import io.infinitic.common.fixtures.TestFactory import io.infinitic.common.messages.Message import io.infinitic.common.requester.ClientRequester +import io.infinitic.common.serDe.SerializedData import io.infinitic.common.tasks.data.ServiceName import io.infinitic.common.tasks.events.messages.ServiceExecutorEventMessage import io.infinitic.common.tasks.events.messages.TaskCompletedEvent @@ -82,19 +83,22 @@ import io.infinitic.common.workflows.engine.messages.TimerDispatchedEvent import io.infinitic.common.workflows.engine.messages.WaitWorkflow import io.infinitic.common.workflows.engine.messages.WorkflowCanceledEvent import io.infinitic.common.workflows.engine.messages.WorkflowCompletedEvent -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import 
io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage +import io.infinitic.events.config.EventListenerConfig +import io.infinitic.events.toCloudEvent +import io.infinitic.events.toJsonString import io.infinitic.storage.config.InMemoryStorageConfig import io.infinitic.transport.config.InMemoryTransportConfig import io.infinitic.workers.InfiniticWorker -import io.infinitic.workers.config.EventListenerConfig import io.kotest.core.spec.style.StringSpec import io.kotest.matchers.shouldBe import io.mockk.Runs import io.mockk.every import io.mockk.just import io.mockk.mockk +import io.mockk.slot import kotlinx.coroutines.delay import kotlinx.serialization.json.Json import kotlinx.serialization.json.jsonObject @@ -103,9 +107,9 @@ import net.bytebuddy.utility.RandomString import java.net.URI import kotlin.reflect.full.isSubclassOf -private val events = mutableListOf() +private val events = slot>() private val eventListener = mockk { - every { onEvent(capture(events)) } just Runs + every { onEvents(capture(events)) } just Runs } private val transport = InMemoryTransportConfig() @@ -116,80 +120,15 @@ private val worker = InfiniticWorker.builder() .setEventListener( EventListenerConfig.builder() .setListener(eventListener) - .setRefreshDelaySeconds(0.0) - .setConcurrency(2), + .setServiceListRefreshSeconds(0.0) + .setWorkflowListRefreshSeconds(0.0) + .setBatch(10, 0.001) + .setConcurrency(10), ) .build() private suspend fun T.sendToTopic(topic: Topic) { with(transport.producer) { sendTo(topic) } - // wait a bit to let listener do its work - // and the listener to discover new services and workflows - delay(100) -} - -suspend fun main() { - worker.startAsync() - - ServiceExecutorMessage::class.sealedSubclasses.forEach { - events.clear() - val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) - message.sendToTopic(ServiceExecutorTopic) - - events.firstOrNull()?.let { event -> - val json = String(JsonFormat().serialize(event)) - println(message) - println(jsonMapper().readTree(json).toPrettyString()) - } - } - - ServiceExecutorEventMessage::class.sealedSubclasses.forEach { - events.clear() - val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) - message.sendToTopic(ServiceExecutorEventTopic) - events.firstOrNull()?.let { event -> - val json = String(JsonFormat().serialize(event)) - println(message) - println(jsonMapper().readTree(json).toPrettyString()) - } - } - - WorkflowStateEngineCmdMessage::class.sealedSubclasses.forEach { - events.clear() - val message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) - message.sendToTopic(WorkflowStateCmdTopic) - events.firstOrNull()?.let { event -> - val json = String(JsonFormat().serialize(event)) - println(message) - println(jsonMapper().readTree(json).toPrettyString()) - } - } - - WorkflowStateEngineMessage::class.sealedSubclasses.forEach { - if (!it.isSubclassOf(WorkflowStateEngineCmdMessage::class)) { - events.clear() - val message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) - message.sendToTopic(WorkflowStateEngineTopic) - events.firstOrNull()?.let { event -> - val json = String(JsonFormat().serialize(event)) - println(message) - println(jsonMapper().readTree(json).toPrettyString()) - } - } - } - - WorkflowStateEngineEventMessage::class.sealedSubclasses.forEach { - events.clear() - val message = TestFactory.random(it, 
mapOf("workflowName" to WorkflowName("WorkflowA"))) - message.sendToTopic(WorkflowStateEventTopic) - events.firstOrNull()?.let { event -> - val json = String(JsonFormat().serialize(event)) - println(message) - println(jsonMapper().readTree(json).toPrettyString()) - } - } - - worker.close() } internal class CloudEventTests : StringSpec( @@ -206,22 +145,46 @@ internal class CloudEventTests : StringSpec( events.clear() } + "Checking ExecuteTask" { + val random = TestFactory.random( + ExecuteTask::class, + mapOf("serviceName" to ServiceName("ServiceA")), + ) + + val args = listOf("a", 1, 2.0, true) + val message = random.copy( + methodArgs = MethodArgs( + args.mapIndexed { _, value -> + SerializedData.encode( + value, + type = value::class.javaObjectType, + jsonViewClass = null, + ) + }.toList(), + ), + ) + val event = message.toCloudEvent(ServiceExecutorTopic, MillisInstant.now(), "test")!! + println(event.toJsonString(true)) + } + ServiceExecutorMessage::class.sealedSubclasses.forEach { "Check ${it.simpleName} event envelope from Service Executor topic" { val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) + if (message is ExecuteTask) { + message.methodArgs + } message.sendToTopic(ServiceExecutorTopic) - - // first test is slow down in GitHub + // first test slow down for GitHub delay(2000) - events.size shouldBe 1 - val event = events.first() + events.isCaptured shouldBe true + val event = events.captured.first() event.id shouldBe message.messageId.toString() event.source shouldBe URI("inMemory/services/executor/ServiceA") event.dataContentType shouldBe "application/json" event.subject shouldBe message.taskId.toString() event.type shouldBe when (it) { - ExecuteTask::class -> "infinitic.task.start" + ExecuteTask::class -> "infinitic.task.dispatch" else -> thisShouldNotHappen() } } @@ -232,8 +195,9 @@ internal class CloudEventTests : StringSpec( val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) message.sendToTopic(ServiceExecutorEventTopic) - events.size shouldBe 1 - val event = events.first() + delay(100) + events.isCaptured shouldBe true + val event = events.captured.first() event.id shouldBe message.messageId.toString() event.source shouldBe URI("inMemory/services/executor/ServiceA") event.dataContentType shouldBe "application/json" @@ -262,13 +226,14 @@ internal class CloudEventTests : StringSpec( ), ) message.sendToTopic(WorkflowExecutorTopic) + delay(100) - events.size shouldBe 1 - val event = events.first() + events.isCaptured shouldBe true + val event = events.captured.first() event.source shouldBe URI("inMemory/workflows/executor/WorkflowA") event.subject shouldBe message.taskId.toString() event.type shouldBe when (it) { - ExecuteTask::class -> "infinitic.task.start" + ExecuteTask::class -> "infinitic.task.dispatch" else -> thisShouldNotHappen() } } @@ -287,9 +252,10 @@ internal class CloudEventTests : StringSpec( message = (message as TaskCompletedEvent).copy(isDelegated = false) } message.sendToTopic(WorkflowExecutorEventTopic) + delay(100) - events.size shouldBe 1 - val event = events.first() + events.isCaptured shouldBe true + val event = events.captured.first() event.source shouldBe URI("inMemory/workflows/executor/WorkflowA") event.subject shouldBe message.taskId.toString() event.type shouldBe when (it) { @@ -302,10 +268,11 @@ internal class CloudEventTests : StringSpec( } } - WorkflowStateEngineCmdMessage::class.sealedSubclasses.forEach { + WorkflowStateCmdMessage::class.sealedSubclasses.forEach { "Check 
${it.simpleName} event envelope from cmd topic" { val message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) message.sendToTopic(WorkflowStateCmdTopic) + delay(100) val type = when (it) { CancelWorkflow::class -> when ((message as CancelWorkflow).workflowMethodId) { @@ -315,8 +282,8 @@ internal class CloudEventTests : StringSpec( CompleteTimers::class -> null CompleteWorkflow::class -> null - DispatchMethod::class -> "infinitic.workflow.startMethod" - DispatchWorkflow::class -> "infinitic.workflow.start" + DispatchMethod::class -> "infinitic.workflow.dispatchMethod" + DispatchWorkflow::class -> "infinitic.workflow.dispatch" RetryTasks::class -> "infinitic.workflow.retryTask" RetryWorkflowTask::class -> "infinitic.workflow.retryExecutor" SendSignal::class -> "infinitic.workflow.signal" @@ -324,9 +291,9 @@ internal class CloudEventTests : StringSpec( else -> thisShouldNotHappen() } - events.size shouldBe if (type == null) 0 else 1 - if (events.size == 1) { - val event = events.first() + events.isCaptured shouldBe (type != null) + if (events.isCaptured) { + val event = events.captured.first() event.id shouldBe message.messageId.toString() event.source shouldBe URI("inMemory/workflows/stateEngine/WorkflowA") event.dataContentType shouldBe "application/json" @@ -336,20 +303,22 @@ internal class CloudEventTests : StringSpec( } } - // TODO complete this test and add similar tests for all other events +// TODO complete this test and add similar tests for all other events "Check infinitic.task.dispatched data" { val message = TestFactory.random( mapOf("serviceName" to ServiceName("ServiceA")), ) message.sendToTopic(ServiceExecutorTopic) + delay(100) - events.size shouldBe 1 - val event = events.first() + events.isCaptured shouldBe true + val event = events.captured.first() val json = Json.parseToJsonElement(String(event.data!!.toBytes())).jsonObject json["taskName"]!!.jsonPrimitive.content shouldBe message.methodName.toString() } - WorkflowStateEngineCmdMessage::class.sealedSubclasses.forEach { + events.clear() + WorkflowStateCmdMessage::class.sealedSubclasses.forEach { "No ${it.simpleName} event should come from engine topic" { val message = TestFactory.random( it, @@ -359,12 +328,13 @@ internal class CloudEventTests : StringSpec( ), ) message.sendToTopic(WorkflowStateEngineTopic) - events.size shouldBe 0 + events.isCaptured shouldBe false } } + events.clear() WorkflowStateEngineMessage::class.sealedSubclasses.forEach { - if (!it.isSubclassOf(WorkflowStateEngineCmdMessage::class)) { + if (!it.isSubclassOf(WorkflowStateCmdMessage::class)) { "Check ${it.simpleName} event envelope from engine topic" { val message = TestFactory.random( it, @@ -373,6 +343,7 @@ internal class CloudEventTests : StringSpec( ), ) message.sendToTopic(WorkflowStateEngineTopic) + delay(100) val type = when (it) { RemoteMethodCanceled::class -> "infinitic.workflow.remoteMethodCanceled" @@ -388,9 +359,9 @@ internal class CloudEventTests : StringSpec( else -> thisShouldNotHappen() } - events.size shouldBe if (type == null) 0 else 1 - if (events.size == 1) { - val event = events.first() + events.isCaptured shouldBe (type != null) + if (events.isCaptured) { + val event = events.captured.first() event.id shouldBe message.messageId.toString() event.source shouldBe URI("inMemory/workflows/stateEngine/WorkflowA") event.dataContentType shouldBe "application/json" @@ -401,18 +372,20 @@ internal class CloudEventTests : StringSpec( } } - WorkflowStateEngineEventMessage::class.sealedSubclasses.forEach { + 
events.clear() + WorkflowStateEventMessage::class.sealedSubclasses.forEach { "Check ${it.simpleName} event envelope from events topic" { val message = TestFactory.random( it, mapOf("workflowName" to WorkflowName("WorkflowA")), ) message.sendToTopic(WorkflowStateEventTopic) + delay(100) val type = when (it) { - WorkflowCompletedEvent::class -> "infinitic.workflow.ended" + WorkflowCompletedEvent::class -> "infinitic.workflow.completed" WorkflowCanceledEvent::class -> "infinitic.workflow.canceled" - MethodCommandedEvent::class -> "infinitic.workflow.startMethod" + MethodCommandedEvent::class -> "infinitic.workflow.dispatchMethod" MethodCompletedEvent::class -> "infinitic.workflow.methodCompleted" MethodFailedEvent::class -> "infinitic.workflow.methodFailed" MethodCanceledEvent::class -> "infinitic.workflow.methodCanceled" @@ -426,9 +399,9 @@ internal class CloudEventTests : StringSpec( else -> thisShouldNotHappen() } - events.size shouldBe 1 - if (events.size == 1) { - val event = events.first() + events.isCaptured shouldBe true + if (events.isCaptured) { + val event = events.captured.first() event.id shouldBe message.messageId.toString() event.source shouldBe URI("inMemory/workflows/stateEngine/WorkflowA") event.dataContentType shouldBe "application/json" @@ -441,3 +414,4 @@ internal class CloudEventTests : StringSpec( ) { } + diff --git a/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/main.kt b/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/main.kt new file mode 100644 index 000000000..b45f94de5 --- /dev/null +++ b/infinitic-cloudevents/src/test/kotlin/io/infinitic/events/messages/main.kt @@ -0,0 +1,127 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
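Note: since onEvent(event) is replaced by the batch onEvents(List<CloudEvent>) (see the CloudEventListener change further below in this diff), a user listener now receives whole batches. A minimal sketch of such a listener; what it does with the events (printing) is purely illustrative:

    import io.cloudevents.CloudEvent
    import io.infinitic.cloudEvents.CloudEventListener

    class PrintingEventListener : CloudEventListener {
        override fun onEvents(cloudEvents: List<CloudEvent>) {
            // events arrive already grouped by the listener pipeline (batchBy / batchProcess)
            cloudEvents.forEach { println("${it.type} ${it.subject} from ${it.source}") }
        }
    }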
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ + +package io.infinitic.events.messages + +import com.fasterxml.jackson.module.kotlin.jsonMapper +import io.cloudevents.CloudEvent +import io.cloudevents.jackson.JsonFormat +import io.infinitic.cloudEvents.CloudEventListener +import io.infinitic.common.fixtures.TestFactory +import io.infinitic.common.messages.Message +import io.infinitic.common.tasks.data.ServiceName +import io.infinitic.common.tasks.events.messages.ServiceExecutorEventMessage +import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage +import io.infinitic.common.transport.ServiceExecutorEventTopic +import io.infinitic.common.transport.ServiceExecutorTopic +import io.infinitic.common.transport.Topic +import io.infinitic.common.transport.WorkflowStateCmdTopic +import io.infinitic.common.transport.WorkflowStateEngineTopic +import io.infinitic.common.transport.WorkflowStateEventTopic +import io.infinitic.common.workflows.data.workflows.WorkflowName +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage +import io.infinitic.events.config.EventListenerConfig +import io.infinitic.storage.config.InMemoryStorageConfig +import io.infinitic.transport.config.InMemoryTransportConfig +import io.infinitic.workers.InfiniticWorker +import io.mockk.every +import io.mockk.mockk +import io.mockk.slot +import kotlinx.coroutines.delay +import kotlin.reflect.full.isSubclassOf + +private val allEvents = mutableListOf() +private val events = slot>() +private val eventListener = mockk { + every { onEvents(capture(events)) } answers { allEvents.addAll(events.captured) } +} + +private val transport = InMemoryTransportConfig() + +private val worker = InfiniticWorker.builder() + .setTransport(transport) + .setStorage(InMemoryStorageConfig.builder()) + .setEventListener( + EventListenerConfig.builder() + .setListener(eventListener) + .setServiceListRefreshSeconds(0.001) + .setWorkflowListRefreshSeconds(0.001) + .setConcurrency(2), + ) + .build() + +private suspend fun T.sendToTopic(topic: Topic) { + with(transport.producer) { sendTo(topic) } +} + +private suspend fun display() { + delay(1200) + allEvents.forEach { event -> + val json = String(JsonFormat().serialize(event)) + println(jsonMapper().readTree(json).toPrettyString()) + } + allEvents.clear() +} + +suspend fun main() { + worker.startAsync() + + ServiceExecutorMessage::class.sealedSubclasses.forEach { + val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) + message.sendToTopic(ServiceExecutorTopic) + } + + ServiceExecutorEventMessage::class.sealedSubclasses.forEach { + val message = TestFactory.random(it, mapOf("serviceName" to ServiceName("ServiceA"))) + message.sendToTopic(ServiceExecutorEventTopic) + } + + WorkflowStateCmdMessage::class.sealedSubclasses.forEach { + val message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) + message.sendToTopic(WorkflowStateCmdTopic) + } + + WorkflowStateEngineMessage::class.sealedSubclasses.forEach { + if (!it.isSubclassOf(WorkflowStateCmdMessage::class)) { + val message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) + message.sendToTopic(WorkflowStateEngineTopic) + } + } + + WorkflowStateEventMessage::class.sealedSubclasses.forEach { + val 
message = TestFactory.random(it, mapOf("workflowName" to WorkflowName("WorkflowA"))) + message.sendToTopic(WorkflowStateEventTopic) + } + + // wait a bit to let listener do its work + // and the listener to discover new services and workflows + delay(1200) + + display() + + worker.close() +} + + diff --git a/infinitic-common/src/main/kotlin/io/infinitic/clients/Deferred.kt b/infinitic-common/src/main/kotlin/io/infinitic/clients/Deferred.kt index 2d6df402a..5d4175f02 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/clients/Deferred.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/clients/Deferred.kt @@ -29,7 +29,7 @@ interface Deferred { val id: String - fun await(): R + suspend fun await(): R fun cancelAsync(): CompletableFuture diff --git a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/CloudEventListener.kt b/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/CloudEventListener.kt index 203377345..4fd2b55c7 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/CloudEventListener.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/CloudEventListener.kt @@ -27,6 +27,6 @@ import io.cloudevents.CloudEvent fun interface CloudEventListener { - fun onEvent(event: CloudEvent) + fun onEvents(cloudEvents: List) } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/SelectionConfig.kt b/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/EntityListConfig.kt similarity index 85% rename from infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/SelectionConfig.kt rename to infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/EntityListConfig.kt index 7b7062efb..2921844bb 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/SelectionConfig.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/EntityListConfig.kt @@ -23,11 +23,16 @@ package io.infinitic.cloudEvents @Suppress("unused") -data class SelectionConfig( +data class EntityListConfig( + val listRefreshSeconds: Double = 60.0, val allow: List? = null, val disallow: List = listOf() ) { + val listRefreshMillis = (listRefreshSeconds * 1000).toLong() + init { + require(listRefreshSeconds >= 0) { error("'${::listRefreshSeconds.name}' must be >= 0, but was $listRefreshSeconds") } + allow?.forEach { require(it.isNotEmpty()) { error("'${::allow.name}' must not contain empty element") } } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/logs/loggersName.kt b/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/logs/loggersName.kt deleted file mode 100644 index 68fb287e5..000000000 --- a/infinitic-common/src/main/kotlin/io/infinitic/cloudEvents/logs/loggersName.kt +++ /dev/null @@ -1,38 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. - * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. 
Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. - * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ -package io.infinitic.cloudEvents.logs - -private const val INFINITIC_PREFIX = "io.infinitic" -private const val CLOUD_EVENTS = "$INFINITIC_PREFIX.cloudEvents" - -private const val WORKFLOW_STATE_ENGINE = "WorkflowStateEngine" -private const val WORKFLOW_TAG_ENGINE = "WorkflowTagEngine" -private const val WORKFLOW_EXECUTOR = "WorkflowExecutor" -private const val SERVICE_TAG_ENGINE = "ServiceTagEngine" -private const val SERVICE_EXECUTOR = "ServiceExecutor" - -const val LOGS_WORKFLOW_STATE_ENGINE = "$CLOUD_EVENTS.$WORKFLOW_STATE_ENGINE" -const val LOGS_WORKFLOW_TAG_ENGINE = "$CLOUD_EVENTS.$WORKFLOW_TAG_ENGINE" -const val LOGS_WORKFLOW_EXECUTOR = "$CLOUD_EVENTS.$WORKFLOW_EXECUTOR" -const val LOGS_SERVICE_TAG_ENGINE = "$CLOUD_EVENTS.$SERVICE_TAG_ENGINE" -const val LOGS_SERVICE_EXECUTOR = "$CLOUD_EVENTS.$SERVICE_EXECUTOR" diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/tasks/data/TaskMeta.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/tasks/data/TaskMeta.kt index 123468f49..3c4e85350 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/tasks/data/TaskMeta.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/tasks/data/TaskMeta.kt @@ -38,7 +38,7 @@ import java.util.* @Serializable(with = TaskMetaSerializer::class) data class TaskMeta(val map: Map = mutableMapOf()) : Map by map, JsonAble { - + override fun toJson() = JsonObject( map.mapValues { JsonPrimitive(Base64.getEncoder().encodeToString(it.value)) }, ) @@ -54,6 +54,11 @@ data class TaskMeta(val map: Map = mutableMapOf()) : } override fun hashCode() = map.hashCode() + + companion object { + @JvmStatic + val BATCH_KEY = "batchKey" + } } object TaskMetaSerializer : KSerializer { diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticConsumer.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticConsumer.kt index ba4aac565..138e9eb31 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticConsumer.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticConsumer.kt @@ -22,6 +22,7 @@ */ package io.infinitic.common.transport +import io.github.oshai.kotlinlogging.KLogger import io.infinitic.common.data.MillisInstant import io.infinitic.common.messages.Message import kotlinx.coroutines.CoroutineScope @@ -29,6 +30,28 @@ import kotlinx.coroutines.Job interface InfiniticConsumer { + /** + * Builds a list of transport consumers for a given subscription and entity. + * + * @param M The type of the messages to be consumed. + * @param subscription The subscription from which to consume messages. + * @param entity The entity associated with this consumer. + * @param occurrence Optional parameter to specify the number of consumers to build. + * @return A list of transport consumers for the specified subscription and entity. + */ + context(KLogger) + suspend fun buildConsumers( + subscription: Subscription, + entity: String, + occurrence: Int? + ): List>> + + context(KLogger) + suspend fun buildConsumer( + subscription: Subscription, + entity: String, + ): TransportConsumer> = buildConsumers(subscription, entity, null).first() + /** * Starts consuming messages from a given subscription and processes them using the provided handler. 
* @@ -36,52 +59,37 @@ interface InfiniticConsumer { * * @return a job corresponding to the endless loop processing * - * @param S The type of the messages to be consumed. + * @param M The type of the messages to be consumed. * @param subscription The subscription from which to consume messages. * @param entity The entity associated with this consumer. (typically a service name or workflow name) * @param process The function to handle each consumed message and its publishing time. * @param beforeDlq An optional function to be executed before sending the message to the dead-letter queue (DLQ). * @param concurrency The number of concurrent message handlers to be used. */ - context(CoroutineScope) - suspend fun startAsync( - subscription: Subscription, + context(CoroutineScope, KLogger) + suspend fun startAsync( + subscription: Subscription, entity: String, concurrency: Int, - process: suspend (S, MillisInstant) -> Unit, - beforeDlq: (suspend (S?, Exception) -> Unit)?, - batchConfig: (suspend (S) -> BatchConfig?)? = null, - batchProcess: (suspend (List, List) -> Unit)? = null + process: suspend (M, MillisInstant) -> Unit, + beforeDlq: (suspend (M, Exception) -> Unit)? = null, + batchConfig: (suspend (M) -> BatchConfig?)? = null, + batchProcess: (suspend (List, List) -> Unit)? = null ): Job /** - * Starts consuming messages from a given subscription and processes them using the provided handler. - * - * The CoroutineScope context is used to start the endless loop that listen for messages * - * @param S The type of the messages to be consumed. - * @param subscription The subscription from which to consume messages. - * @param entity The entity associated with this consumer. (typically a service name or workflow name) - * @param process The function to handle each consumed message and its publishing time. - * @param beforeDlq An optional function to be executed before sending the message to the dead-letter queue (DLQ). - * @param concurrency The number of concurrent message handlers to be used. */ - context(CoroutineScope) - suspend fun start( - subscription: Subscription, + context(CoroutineScope, KLogger) + suspend fun start( + subscription: Subscription, entity: String, concurrency: Int, - process: suspend (S, MillisInstant) -> Unit, - beforeDlq: (suspend (S?, Exception) -> Unit)?, - batchConfig: (suspend (S) -> BatchConfig?)? = null, - batchProcess: (suspend (List, List) -> Unit)? = null - ): Unit = startAsync( - subscription, - entity, - concurrency, - process, - beforeDlq, - batchConfig, - batchProcess, + process: suspend (M, MillisInstant) -> Unit, + beforeDlq: (suspend (M, Exception) -> Unit)? = null, + batchConfig: (suspend (M) -> BatchConfig?)? = null, + batchProcess: (suspend (List, List) -> Unit)? 
= null + ) = startAsync( + subscription, entity, concurrency, process, beforeDlq, batchConfig, batchProcess, ).join() } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticProducer.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticProducer.kt index 037a81b09..9e5ae3937 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticProducer.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/InfiniticProducer.kt @@ -54,7 +54,7 @@ interface InfiniticProducer { after: MillisDuration = MillisDuration(0) ) { require(after <= 0 || topic.acceptDelayed) { thisShouldNotHappen("Trying to send to $topic with a delay $after") } - + // Switch to workflow-related topics for workflowTasks val t = when (this) { is ServiceExecutorMessage -> if (isWorkflowTask()) topic.forWorkflow else topic diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/Topic.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/Topic.kt index c0450f77d..f62b5f423 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/Topic.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/Topic.kt @@ -27,9 +27,9 @@ import io.infinitic.common.messages.Message import io.infinitic.common.tasks.events.messages.ServiceExecutorEventMessage import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage import io.infinitic.common.tasks.tags.messages.ServiceTagMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEngineMessage @@ -102,7 +102,7 @@ data object WorkflowTagEngineTopic : WorkflowTopic() { override val prefix = "workflow-tag" } -data object WorkflowStateCmdTopic : WorkflowTopic() { +data object WorkflowStateCmdTopic : WorkflowTopic() { override val prefix = "workflow-cmd" } @@ -114,7 +114,7 @@ data object WorkflowStateTimerTopic : WorkflowTopic( override val prefix = "workflow-delay" } -data object WorkflowStateEventTopic : WorkflowTopic() { +data object WorkflowStateEventTopic : WorkflowTopic() { override val prefix = "workflow-events" } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportConsumer.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportConsumer.kt index 2b1cf9894..bb59594b7 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportConsumer.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportConsumer.kt @@ -22,15 +22,22 @@ */ package io.infinitic.common.transport -import kotlinx.coroutines.future.await -import java.util.concurrent.CompletableFuture +interface TransportConsumer> { + /** + * Receives a transport message from the consumer. + * + * @return A message of type [T] received from the transport. + */ + suspend fun receive(): T -interface TransportConsumer { - fun receiveAsync(): CompletableFuture + /** + * Defines the maximum number of times a message can be redelivered + * when processing messages from a transport consumer. 
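Note: a hedged sketch of how the reworked start API above might be invoked, assuming the project's -Xcontext-receivers flag, an available InfiniticConsumer instance, and an event-listener subscription built as elsewhere in this diff; the entity name and concurrency are placeholders:

    import io.github.oshai.kotlinlogging.KotlinLogging
    import io.infinitic.common.transport.InfiniticConsumer
    import io.infinitic.common.transport.SubscriptionType
    import io.infinitic.common.transport.WorkflowStateEventTopic
    import io.infinitic.common.transport.create
    import kotlinx.coroutines.CoroutineScope

    private val logger = KotlinLogging.logger {}

    suspend fun CoroutineScope.listenToEvents(consumer: InfiniticConsumer) = with(logger) {
        consumer.start(
            subscription = SubscriptionType.EVENT_LISTENER.create(WorkflowStateEventTopic, null),
            entity = "WorkflowA",
            concurrency = 2,
            process = { message, publishedAt -> info { "received $message published at $publishedAt" } },
            // beforeDlq, batchConfig and batchProcess keep their null defaults
        )
    }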
+ */ + val maxRedeliveryCount: Int - fun acknowledgeAsync(message: T): CompletableFuture - fun negativeAcknowledgeAsync(message: T): CompletableFuture - - suspend fun acknowledge(message: T): Unit = acknowledgeAsync(message).await() - suspend fun negativeAcknowledge(message: T): Unit = negativeAcknowledgeAsync(message).await() + /** + * Represents the name of the TransportConsumer. Used for Logging only + */ + val name: String } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportMessage.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportMessage.kt index b6d4dee89..369e4964c 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportMessage.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/TransportMessage.kt @@ -24,8 +24,37 @@ package io.infinitic.common.transport import io.infinitic.common.data.MillisInstant -interface TransportMessage { - val messageId: String - val redeliveryCount: Int +/** + * Represents a transport message that can be deserialized. + * + * @param M The type of the payload contained within the message. + */ +interface TransportMessage { val publishTime: MillisInstant + val messageId: String + val topic: Topic<*> + + /** + * Deserializes the message into its original form. + * + * @return The deserialized message. + */ + fun deserialize(): M + + /** + * Acknowledges the given message. + */ + suspend fun acknowledge() + + /** + * Processes a negative acknowledgment for the given message. + */ + suspend fun negativeAcknowledge() + + /** + * This property reflects the state where the message has failed to process successfully + * repeatedly, and the total count of negative acknowledgments has reached a predefined limit + * after which the message will be sent to DLQ + */ + val hasBeenSentToDeadLetterQueue: Boolean } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/OneOrMany.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/OneOrMany.kt index bbdb01960..666abb022 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/OneOrMany.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/OneOrMany.kt @@ -29,10 +29,10 @@ package io.infinitic.common.transport.consumers */ sealed interface OneOrMany -internal class One(val datum: D) : OneOrMany { +class One(val datum: D) : OneOrMany { override fun toString() = "One(${datum.toString()})" } -internal class Many(val data: List) : OneOrMany { +class Many(val data: List) : OneOrMany { override fun toString() = "Many($data})" } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumer.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumer.kt deleted file mode 100644 index 4ed98037c..000000000 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumer.kt +++ /dev/null @@ -1,140 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. 
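Note: a hedged sketch of the new TransportMessage / TransportConsumer contracts implemented in memory (e.g. for tests); the class names, the redelivery limit and the channel-backed queue are assumptions, not part of this change:

    import io.infinitic.common.data.MillisInstant
    import io.infinitic.common.transport.Topic
    import io.infinitic.common.transport.TransportConsumer
    import io.infinitic.common.transport.TransportMessage
    import kotlinx.coroutines.channels.Channel
    import java.util.UUID

    class InMemoryMessage<M : Any>(
        private val payload: M,
        override val topic: Topic<*>,
    ) : TransportMessage<M> {
        override val messageId: String = UUID.randomUUID().toString()
        override val publishTime: MillisInstant = MillisInstant.now()
        private var negativeAckCount = 0
        private var sentToDlq = false
        override val hasBeenSentToDeadLetterQueue: Boolean get() = sentToDlq

        override fun deserialize(): M = payload
        override suspend fun acknowledge() { /* nothing to do in memory */ }
        override suspend fun negativeAcknowledge() {
            // once the redelivery limit is reached, the message is considered dead-lettered
            if (++negativeAckCount >= 3) sentToDlq = true
        }
    }

    class InMemoryConsumer<M : Any>(
        private val queue: Channel<InMemoryMessage<M>>,
    ) : TransportConsumer<InMemoryMessage<M>> {
        override val maxRedeliveryCount = 3
        override val name = "in-memory-consumer"
        override suspend fun receive(): InMemoryMessage<M> = queue.receive()
    }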
- * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. - * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ -package io.infinitic.common.transport.consumers - -import io.github.oshai.kotlinlogging.KLogger -import io.github.oshai.kotlinlogging.KotlinLogging -import io.infinitic.common.data.MillisInstant -import io.infinitic.common.transport.BatchConfig -import io.infinitic.common.transport.TransportConsumer -import io.infinitic.common.transport.TransportMessage -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Job -import kotlinx.coroutines.launch - -/** - * A generic consumer for messages that handles - * - deserialization, - * - single and batch processing, - * - acknowledgement and negative acknowledgement. - * - * @param S The type of the message implementing the interface [TransportMessage]. - * @param D The type of the deserialized message. - * - * @param consumer the transport consumer responsible for receiving messages. - * @param beforeNegativeAcknowledgement A suspend function called before negatively acknowledging a message. - */ -class ProcessorConsumer( - private val consumer: TransportConsumer, - private val beforeNegativeAcknowledgement: (suspend (S, D?, Exception) -> Unit)?, -) { - - /** - * Starts an asynchronous operation to process messages received by the consumer. - * - * Constraints: - * - when calling scope is canceled, all ongoing messages should be processed, and the job terminated - * - an exception is handled - * - an error triggers a scope cancelling - * - * @param concurrency The number of concurrent coroutines to use for processing. - * @param deserialize A suspending function to deserialize messages. - * @param process A suspending function to process deserialized messages. - * @param batchConfig A suspending function to provide batch configuration for deserialized messages, nullable. - * @param batchProcess A suspending function to process batches of deserialized messages, nullable. - * @return A Job representing the coroutine for the asynchronous operation. - */ - fun CoroutineScope.startAsync( - concurrency: Int, - deserialize: suspend (S) -> D, - process: suspend (D, MillisInstant) -> Unit, - batchConfig: (suspend (D) -> BatchConfig?)? = null, - batchProcess: (suspend (List, List) -> Unit)? 
= null, - ): Job = launch { - with(logger) { - consumer - .startConsuming() - .process( - concurrency, - { _, message -> deserialize(message) }, - ) - .batchBy { datum -> batchConfig?.let { it(datum) } } - .batchProcess( - concurrency, - { message, datum -> process(datum, message.publishTime); datum }, - { messages, data -> batchProcess!!(data, messages.map { it.publishTime }); data }, - ) - .collect { result -> - val message = result.message() - result.onSuccess { - acknowledge(message, it) - } - result.onFailure { - val d = try { - deserialize(message) - } catch (e: Exception) { - null - } - negativeAcknowledge(message, d, it) - } - } - } - } - - context(KLogger) - private suspend fun acknowledge(message: S, deserialize: D) = try { - consumer.acknowledge(message) - } catch (e: Exception) { - logWarn(e) { "Error when acknowledging ${deserialize.string}" } - } - - context(KLogger) - private suspend fun negativeAcknowledge(message: S, deserialized: D?, e: Exception) { - try { - beforeNegativeAcknowledgement?.let { it(message, deserialized, e) } - } catch (e: Exception) { - logWarn(e) { - "Error when calling negative acknowledgement hook for message " + - (deserialized?.string ?: message.messageId) - } - } - try { - consumer.negativeAcknowledge(message) - } catch (e: Exception) { - logWarn(e) { - "Error when negatively acknowledging message " + - (deserialized?.string ?: message.messageId) - } - } - } - - private fun logWarn(e: Exception, message: () -> String) = try { - logger.warn(e, message) - } catch (e: Exception) { - System.err.println(message()) - } - - companion object { - private val logger = KotlinLogging.logger {} - } -} diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/Result.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/Result.kt index 4adb4db61..4d7b00829 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/Result.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/Result.kt @@ -22,7 +22,7 @@ */ package io.infinitic.common.transport.consumers -internal class Result internal constructor( +class Result internal constructor( private val message: M, private val value: Any? ) { @@ -71,13 +71,13 @@ internal class Result internal constructor( /** * Returns an instance that encapsulates the given [value] as successful value. */ - fun success(message: M, value: T): Result = + fun success(message: M, value: T): Result = Result(message, value) /** * Returns an instance that encapsulates the given [Exception] [exception] as failure. */ - fun failure(message: M, exception: Exception): Result = + fun failure(message: M, exception: Exception): Result = Result(message, Failure(exception)) } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/acknowledge.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/acknowledge.kt new file mode 100644 index 000000000..3809293a1 --- /dev/null +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/acknowledge.kt @@ -0,0 +1,93 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. 
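Note: Result (made public above) wraps a message together with either a value or a failure. A hedged sketch of how downstream code might build and inspect one, assuming the two-parameter Result<M, T> shape suggested by this diff:

    import io.infinitic.common.transport.consumers.Result

    fun <M> handle(message: M, block: (M) -> String): Result<M, String> = try {
        Result.success(message, block(message))
    } catch (e: Exception) {
        Result.failure(message, e)
    }

    fun main() {
        val result = handle("msg-1") { it.uppercase() }
        result.onSuccess { println("processed: $it") }
        result.onFailure { e -> println("failed: ${e.message}") }
    }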
+ * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.common.transport.consumers + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.transport.TransportMessage +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.channels.Channel + +/** + * Collect messages from the channel and acknowledge or handle failures accordingly. + * + * @param T The type of the transport message extending `TransportMessage`. + * @param M The type of the message contained within the transport message. + * @param I The type of additional information carried by the result. + * @param beforeDlq A suspending function that will be executed before a message is sent to the dead-letter queue. + */ +context(CoroutineScope, KLogger) +fun , M : Any, I> Channel>.acknowledge( + beforeDlq: (suspend (M, Exception) -> Unit)? = null, +) = collect { result -> + val message = result.message() + + result.onSuccess { + message.tryAcknowledge() + } + + result.onFailure { exception -> + // negative acknowledge this failed message + message.tryNegativeAcknowledge() + + // if the message reached a pre-defined number of negative acknowledgment, + // il will be automatically be sent to Dead-Letter-Queue + // In that case, we try to run the `beforeDlq` function + if (message.hasBeenSentToDeadLetterQueue) { + val deserialized = message.deserializeOrNull() + // log as error, as this failed message will now be skipped + error(exception) { + "Sending message to DLQ: ${deserialized?.string ?: message.messageId}" + } + if (deserialized != null && beforeDlq != null) { + try { + beforeDlq(deserialized, exception) + } catch (e: Exception) { + warn(e) { + "Error when calling dead letter hook for message ${deserialized.string}" + } + } + } + } + } +} + +context(KLogger) +private suspend fun TransportMessage.tryAcknowledge() = try { + acknowledge() +} catch (e: Exception) { + warn(e) { "Error when acknowledging ${deserializeOrNull()?.string ?: messageId}" } +} + +context(KLogger) +private suspend fun TransportMessage.tryNegativeAcknowledge() = try { + negativeAcknowledge() +} catch (e: Exception) { + warn(e) { "Error when negatively acknowledging ${deserializeOrNull()?.string ?: messageId}" } + null +} + +private fun TransportMessage.deserializeOrNull(): S? = try { + deserialize() +} catch (e: Exception) { + null +} diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchBy.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchBy.kt index 37de11f67..295503d55 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchBy.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchBy.kt @@ -46,7 +46,7 @@ import kotlinx.coroutines.withContext * either a [SingleMessage] or a [MultipleMessages] instance. 
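To illustrate the dead-letter hook that the new acknowledge() step accepts, here is a hedged sketch; only the (M, Exception) -> Unit shape comes from the diff, the OrderEvent payload and the alerting side effect are hypothetical:

```kotlin
// Hypothetical deserialized payload, standing in for the M type parameter.
data class OrderEvent(val orderId: String)

// Called only once the transport reports the message as routed to the DLQ,
// after the regular negative acknowledgement has already been attempted.
suspend fun alertOnDeadLetter(message: OrderEvent, cause: Exception) {
    // Side effects only: the message itself is not retried here.
    println("Order ${message.orderId} was dead-lettered: ${cause.message}")
}
```

Such a function would be passed as the beforeDlq argument of acknowledge(), or of completeProcess / startAsync further down, which simply forward it.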
*/ context(CoroutineScope, KLogger) -internal fun Channel>.batchBy( +fun Channel>.batchBy( getBatchConfig: suspend (I) -> BatchConfig?, ): Channel>> { val callingScope: CoroutineScope = this@CoroutineScope @@ -54,6 +54,8 @@ internal fun Channel>.batchBy( // output channel where result after processing are sent val outputChannel = Channel>>() + debug { "batchBy: starting listening channel ${this@batchBy.hashCode()}" } + // channels where messages are sent to be batched (one channel per key) val batchingMutex = Mutex() val batchingChannels = mutableMapOf>>() @@ -70,7 +72,7 @@ ): Channel> = Channel>().also { debug { "batchBy: adding producer to batching channel ${it.hashCode()}" } it.addProducer() - trace { "batchBy: added producer to batching channel ${it.hashCode()}" } + trace { "batchBy: producer added to batching channel ${it.hashCode()}" } it.startBatching(maxMessages, maxDuration, outputChannel) } @@ -84,7 +86,7 @@ debug { "batchBy: adding producer to output channel ${outputChannel.hashCode()}" } outputChannel.addProducer() - trace { "batchBy: added producer to output channel ${outputChannel.hashCode()}" } + trace { "batchBy: producer added to output channel ${outputChannel.hashCode()}" } // For batching channels, the addProducer method is called at creation while (true) { try { @@ -116,7 +118,7 @@ outputChannel.removeProducer() batchingMutex.withLock { batchingChannels.forEach { (key, channel) -> - trace { "batchBy: exiting, removing producer from $key batching channel ${channel.hashCode()}" } + trace { "batchBy: exited, producer removed from $key batching channel ${channel.hashCode()}" } channel.removeProducer() } } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchProcess.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchProcess.kt index d83057ce3..f64883a52 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchProcess.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/batchProcess.kt @@ -35,19 +35,24 @@ import kotlinx.coroutines.withContext * Processes elements received in this channel either individually or in batches, * allowing for concurrent processing and returning the results in an output channel. * + * To prevent subtle errors that would leave messages un-acknowledged when the batchProcess + * function filters some of them, we restrict batch processing to functions that return nothing. + * * @param concurrency The number of concurrent coroutines that will process the elements. Default is 1. * @param singleProcess Function to process a single element. * @param batchProcess Function to process a batch of elements. * @return A channel where the results of the processed elements will be sent.
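To make the new Unit-returning contract concrete, here is a sketch of the kind of handlers a caller now supplies (the same shapes are used by completeProcess and startAsync later in this diff); the MetricEvent payload and the bodies are assumptions, only the parameter shapes come from the signatures:

```kotlin
import io.infinitic.common.data.MillisInstant

// Hypothetical deserialized payload.
data class MetricEvent(val name: String, val value: Double)

// Single-message handler: side effects only, nothing is returned.
suspend fun handleOne(event: MetricEvent, publishTime: MillisInstant) {
    println("at $publishTime: ${event.name} = ${event.value}")
}

// Batch handler: it must deal with every element it receives; since nothing is
// returned, no message can be silently filtered out and left un-acknowledged.
suspend fun handleMany(events: List<MetricEvent>, publishTimes: List<MillisInstant>) {
    require(events.size == publishTimes.size)
    println("bulk write of ${events.size} events")
}
```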
*/ context(CoroutineScope, KLogger) -internal fun Channel>>.batchProcess( +fun Channel>>.batchProcess( concurrency: Int = 1, - singleProcess: suspend (M, I) -> O, - batchProcess: suspend (List, List) -> List, -): Channel> { + singleProcess: suspend (M, I) -> Unit, + batchProcess: suspend (List, List) -> Unit, +): Channel> { val callingScope: CoroutineScope = this@CoroutineScope - val outputChannel: Channel> = Channel() + val outputChannel: Channel> = Channel() + + debug { "batchProcess: starting listening channel ${this@batchProcess.hashCode()}" } suspend fun process(one: One>) { val result = one.datum @@ -56,8 +61,8 @@ internal fun Channel>>.batchProcess( } if (result.isSuccess) { try { - val o = singleProcess(result.message(), result.value()) - outputChannel.send(result.success(o)) + singleProcess(result.message(), result.value()) + outputChannel.send(result) } catch (e: Exception) { outputChannel.send(result.failure(e)) } @@ -70,10 +75,8 @@ internal fun Channel>>.batchProcess( // At this point, all results should be a success val values = results.map { it.value() } try { - val output = batchProcess(messages, values) - messages.zip(output).forEach { (message, value) -> - outputChannel.send(Result.success(message, value)) - } + batchProcess(messages, values) + results.forEach { outputChannel.send(it) } } catch (e: Exception) { warn(e) { "batchProcess: exception when batch processing messages: ${messages.map { it.string }}" } messages.forEach { message -> @@ -84,11 +87,11 @@ internal fun Channel>>.batchProcess( launch { withContext(NonCancellable) { - repeat(concurrency) { + repeat(concurrency) { index -> launch { - debug { "batchProcess: adding producer $it to ${outputChannel.hashCode()}" } + debug { "batchProcess: adding producer $index to ${outputChannel.hashCode()}" } outputChannel.addProducer() - trace { "batchProcess: added producer $it to ${outputChannel.hashCode()}" } + trace { "batchProcess: producer $index added to ${outputChannel.hashCode()}" } while (true) { try { // the only way to quit this loop is to close the input channel @@ -107,9 +110,9 @@ internal fun Channel>>.batchProcess( callingScope.cancel() } } - debug { "batchProcess: removing producer $it to ${outputChannel.hashCode()}" } + debug { "batchProcess: exiting, removing producer $index from ${outputChannel.hashCode()}" } outputChannel.removeProducer() - trace { "batchProcess: removed producer $it to ${outputChannel.hashCode()}" } + trace { "batchProcess: exited, producer $index removed from ${outputChannel.hashCode()}" } } } } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/channels.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/channels.kt index 0e931e54b..fbd017cd0 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/channels.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/channels.kt @@ -34,13 +34,13 @@ private val producerCounters = mutableMapOf, Int>() context(KLogger) internal suspend fun Channel<*>.addProducer() = producersMutex.withLock { - debug { "Adding one producer from ${producerCounters[this]} to channel ${this.hashCode()}" } + trace { "Adding one producer from ${producerCounters[this]} to channel ${this.hashCode()}" } producerCounters[this] = (producerCounters[this] ?: 0) + 1 } context(KLogger) internal suspend fun Channel<*>.removeProducer() = producersMutex.withLock { - debug { "Removing one producer from ${producerCounters[this]} from channel ${this.hashCode()}" } + 
trace { "Removing one producer from ${producerCounters[this]} from channel ${this.hashCode()}" } when (val count = producerCounters[this]) { null -> thisShouldNotHappen() 1 -> { diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/collect.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/collect.kt index 17f6adcda..3dd2a589a 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/collect.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/collect.kt @@ -38,11 +38,13 @@ import kotlinx.coroutines.withContext * If null, no processing is applied. */ context(CoroutineScope, KLogger) -internal fun Channel.collect( +fun Channel.collect( process: (suspend (S) -> Unit)? = null ) { val callingScope: CoroutineScope = this@CoroutineScope + debug { "collect: starting listening channel ${this@collect.hashCode()}" } + launch { withContext(NonCancellable) { while (true) { diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/completeProcess.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/completeProcess.kt new file mode 100644 index 000000000..054dd80ee --- /dev/null +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/completeProcess.kt @@ -0,0 +1,50 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.common.transport.consumers + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.transport.BatchConfig +import io.infinitic.common.transport.TransportMessage +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.channels.Channel + +context(CoroutineScope, KLogger) +fun , M : Any> Channel>.completeProcess( + concurrency: Int, + deserialize: suspend (T) -> M, + process: suspend (M, MillisInstant) -> Unit, + beforeDlq: (suspend (M, Exception) -> Unit)? = null, + batchConfig: (suspend (M) -> BatchConfig?)? = null, + batchProcess: (suspend (List, List) -> Unit)? 
= null +): Unit = this + .process(concurrency, { _, message -> deserialize(message) }) + .batchBy { datum -> batchConfig?.invoke(datum) } + .batchProcess( + concurrency, + { message, datum -> process(datum, message.publishTime) }, + { messages, data -> batchProcess!!(data, messages.map { it.publishTime }) }, + ) + .acknowledge(beforeDlq) + + diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/process.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/process.kt index f64034868..ada466ff2 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/process.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/process.kt @@ -44,19 +44,23 @@ import kotlinx.coroutines.withContext * @return A new channel that contains the processed results. */ context(CoroutineScope, KLogger) -internal fun Channel>.process( +fun Channel>.process( concurrency: Int = 1, process: suspend (M, I) -> O, ): Channel> { val callingScope = this@CoroutineScope val outputChannel = Channel>() + debug { "process: starting listening channel ${this@process.hashCode()}" } + launch { // start a non cancellable scope withContext(NonCancellable) { - repeat(concurrency) { + repeat(concurrency) { index -> launch { + debug { "process: adding producer $index to output channel ${outputChannel.hashCode()}" } outputChannel.addProducer() + trace { "process: producer added $index to output channel ${outputChannel.hashCode()}" } while (true) { try { // the only way to quit this loop is to close the input channel @@ -81,9 +85,9 @@ internal fun Channel>.process( callingScope.cancel() } } + debug { "process: exiting, removing producer $index from output channel ${outputChannel.hashCode()}" } outputChannel.removeProducer() - // cancel current scope - trace { "process: exiting" } + trace { "process: exited, producer $index removed from output channel ${outputChannel.hashCode()}" } } } } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startAsync.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startAsync.kt new file mode 100644 index 000000000..8531fba53 --- /dev/null +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startAsync.kt @@ -0,0 +1,62 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.common.transport.consumers + +import io.github.oshai.kotlinlogging.KLogger +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.transport.BatchConfig +import io.infinitic.common.transport.TransportConsumer +import io.infinitic.common.transport.TransportMessage +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Job +import kotlinx.coroutines.launch + +/** + * Starts consuming transport messages asynchronously with the given parameters. + * + * @param concurrency The number of concurrent coroutines for processing messages. + * @param deserialize A suspending function to deserialize the transport message into its payload. + * @param process A suspending function to process the deserialized message along with its publishing time. + * @param batchConfig An optional suspending function to configure batching of messages. + * @param batchProcess An optional suspending function to process a batch of messages. + * @return A Job representing the coroutine that runs the consuming process. + */ +context(CoroutineScope, KLogger) +fun , M : Any> TransportConsumer.startAsync( + concurrency: Int, + deserialize: suspend (T) -> M, + process: suspend (M, MillisInstant) -> Unit, + beforeDlq: (suspend (M, Exception) -> Unit)? = null, + batchConfig: (suspend (M) -> BatchConfig?)? = null, + batchProcess: (suspend (List, List) -> Unit)? = null, +): Job = launch { + startConsuming() + .completeProcess( + concurrency, + deserialize, + process, + beforeDlq, + batchConfig, + batchProcess, + ) +} diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startBatching.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startBatching.kt index b12bc6bd6..115c559e2 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startBatching.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startBatching.kt @@ -40,10 +40,10 @@ import kotlinx.coroutines.withTimeoutOrNull * @param outputChannel The channel to send the batched messages to. Defaults to a new channel. 
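A sketch of how a worker might wire a consumer through the new startAsync entry point added above, reusing the MetricEvent handlers sketched earlier. The generic bounds are stripped in this extract, so the TransportMessage<MetricEvent> and TransportConsumer<T> shapes are assumptions; only the parameter names come from the diff:

```kotlin
import io.github.oshai.kotlinlogging.KLogger
import io.infinitic.common.transport.TransportConsumer
import io.infinitic.common.transport.TransportMessage
import io.infinitic.common.transport.consumers.startAsync
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job

// Hypothetical wiring: the bound below assumes TransportMessage is generic
// in its deserialized payload, as the stripped signatures suggest.
context(CoroutineScope, KLogger)
fun <T : TransportMessage<MetricEvent>> startMetricConsumer(
    consumer: TransportConsumer<T>,
): Job = consumer.startAsync(
    concurrency = 5,
    deserialize = { transportMessage -> transportMessage.deserialize() },
    process = ::handleOne,
    beforeDlq = { event, e -> warn(e) { "Giving up on metric ${event.name}" } },
    // batchConfig / batchProcess keep their null defaults: one-by-one processing
)
```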
*/ context(CoroutineScope, KLogger) -internal fun Channel>.startBatching( +fun Channel.startBatching( maxMessages: Int, maxDuration: Long, - outputChannel: Channel>>, + outputChannel: Channel>, ) = launch { var isOpen = true debug { "batching: adding producer on output channel ${this.hashCode()} " } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startConsuming.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startConsuming.kt index 83cd0f352..04123c14b 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startConsuming.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/consumers/startConsuming.kt @@ -26,11 +26,12 @@ import io.github.oshai.kotlinlogging.KLogger import io.infinitic.common.transport.TransportConsumer import io.infinitic.common.transport.TransportMessage import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.NonCancellable import kotlinx.coroutines.cancel import kotlinx.coroutines.channels.Channel -import kotlinx.coroutines.future.await import kotlinx.coroutines.isActive import kotlinx.coroutines.launch +import kotlinx.coroutines.withContext import kotlin.coroutines.cancellation.CancellationException /** @@ -41,14 +42,20 @@ import kotlin.coroutines.cancellation.CancellationException * @return A channel that emits Result objects containing the original and resulting messages. */ context(CoroutineScope, KLogger) -internal fun TransportConsumer.startConsuming(): Channel> { - val channel = Channel>() - val scope = this@CoroutineScope +fun , M> TransportConsumer.startConsuming( + channel: Channel, TransportMessage>> = Channel(), +): Channel> { + debug { "startConsuming: starting producing on channel ${channel.hashCode()} from ${this@startConsuming.name}" } - scope.launch { + launch { + debug { "startConsuming: adding producer to consuming channel ${channel.hashCode()}" } + channel.addProducer() + trace { "startConsuming: producer added to consuming channel ${channel.hashCode()}" } while (isActive) { try { - val msg = receiveAsync().await().also { trace { "consuming: received $it" } } + val msg = receive().also { + trace { "consuming: received $it from ${this@startConsuming.name}" } + } channel.send(Result.success(msg, msg)) } catch (e: CancellationException) { // do nothing, will exit if calling scope is not active anymore @@ -56,13 +63,18 @@ internal fun TransportConsumer.startConsuming(): Chann warn(e) { "Exception when receiving message from $this" } } catch (e: Error) { warn(e) { "Error when receiving message from $this" } - // canceling current scope - scope.cancel() + // canceling current scope (warning scope is different from inside launch) + // that's why we define the scope variable at the very beginning + this@CoroutineScope.cancel() } } - debug { "consuming: exiting" } - channel.close() + withContext(NonCancellable) { + debug { "startConsuming: exiting, removing producer from consuming channel ${channel.hashCode()}" } + channel.removeProducer() + trace { "startConsuming: exited, producer removed from consuming channel ${channel.hashCode()}" } + } } - return channel + @Suppress("UNCHECKED_CAST") + return channel as Channel> } diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/logged/LoggedInfiniticConsumer.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/transport/logged/LoggedInfiniticConsumer.kt deleted file mode 100644 index a56092fc1..000000000 --- 
a/infinitic-common/src/main/kotlin/io/infinitic/common/transport/logged/LoggedInfiniticConsumer.kt +++ /dev/null @@ -1,74 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. - * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. - * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ -package io.infinitic.common.transport.logged - -import io.github.oshai.kotlinlogging.KLogger -import io.infinitic.common.data.MillisInstant -import io.infinitic.common.messages.Message -import io.infinitic.common.transport.BatchConfig -import io.infinitic.common.transport.InfiniticConsumer -import io.infinitic.common.transport.Subscription -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Job - -class LoggedInfiniticConsumer( - private val logger: KLogger, - private val consumer: InfiniticConsumer, -) : InfiniticConsumer { - - context(CoroutineScope) - override suspend fun startAsync( - subscription: Subscription, - entity: String, - concurrency: Int, - process: suspend (S, MillisInstant) -> Unit, - beforeDlq: (suspend (S?, Exception) -> Unit)?, - batchConfig: (suspend (S) -> BatchConfig?)?, - batchProcess: (suspend (List, List) -> Unit)? - ): Job { - val loggedHandler: suspend (S, MillisInstant) -> Unit = { message, instant -> - logger.debug { formatLog(message.id(), "Processing:", message) } - process(message, instant) - logger.trace { formatLog(message.id(), "Processed:", message) } - } - - val loggedBeforeDlq: suspend (S?, Exception) -> Unit = { message, e -> - logger.error(e) { "Sending message to DLQ: ${message ?: "(Not Deserialized)"}." } - beforeDlq?.let { - logger.debug { "BeforeDlq processing..." } - it(message, e) - logger.trace { "BeforeDlq processed." 
} - } - } - - return consumer.startAsync( - subscription, - entity, - concurrency, - loggedHandler, - loggedBeforeDlq, - batchConfig, - batchProcess, - ) - } -} diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/utils/BatchUtil.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/utils/BatchUtil.kt index 628ad5064..ee0a55d63 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/utils/BatchUtil.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/utils/BatchUtil.kt @@ -82,23 +82,25 @@ fun Class<*>.getBatchMethods(): List = 1 -> if (!batch.hasBatchReturnTypeOf(singles[0])) throw Exception( "The return type of the @Batch method $name:${batch.name} should be " + -                "List<${singles[0].genericReturnType}>, but is ${batch.genericReturnType}", +                "Map, but is ${batch.genericReturnType}", ) else -> { // This is to cover a special case in Kotlin where the single method returns -            // Nothing ( fun myMethod() = thiswhouldNnothappen()) +            // Nothing ( fun myMethod() = thisshouldnothappen()) singles.forEach { single -> if ((single.returnType != Void::class.java) && !batch.hasBatchReturnTypeOf(single)) throw Exception( "The return type of the @Batch method $name:${batch.name} should be " + -                  "List<${single.genericReturnType}>, but is ${batch.genericReturnType}", +                  "Map, but is ${batch.genericReturnType}", ) } } } } }.map { (batch, singles) -> + // ensure batch method is accessible even if private + batch.isAccessible = true singles.map { single -> BatchMethod( single, diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelope.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelope.kt index b80603c90..3ff0aba88 100--- --- a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelope.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelope.kt @@ -33,7 +33,7 @@ import org.apache.avro.Schema @AvroNamespace("io.infinitic.workflows.engine") data class WorkflowCmdEnvelope( private val workflowId: WorkflowId, - private val type: WorkflowStateEngineCmdMessageType, + private val type: WorkflowStateCmdMessageType, private val dispatchWorkflow: DispatchWorkflow? = null, private val dispatchMethod: DispatchMethod? = null, private val waitWorkflow: WaitWorkflow? = null, @@ -43,7 +43,7 @@ data class WorkflowCmdEnvelope( private val completeTimers: CompleteTimers? = null, private val completeWorkflow: CompleteWorkflow? = null, private val sendSignal: SendSignal?
= null, -) : Envelope { +) : Envelope { init { val noNull = listOfNotNull( dispatchWorkflow, @@ -63,59 +63,59 @@ data class WorkflowCmdEnvelope( } companion object { - fun from(msg: WorkflowStateEngineCmdMessage) = when (msg) { + fun from(msg: WorkflowStateCmdMessage) = when (msg) { is DispatchWorkflow -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.DISPATCH_WORKFLOW, + type = WorkflowStateCmdMessageType.DISPATCH_WORKFLOW, dispatchWorkflow = msg, ) is DispatchMethod -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.DISPATCH_METHOD, + type = WorkflowStateCmdMessageType.DISPATCH_METHOD, dispatchMethod = msg, ) is WaitWorkflow -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.WAIT_WORKFLOW, + type = WorkflowStateCmdMessageType.WAIT_WORKFLOW, waitWorkflow = msg, ) is CancelWorkflow -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.CANCEL_WORKFLOW, + type = WorkflowStateCmdMessageType.CANCEL_WORKFLOW, cancelWorkflow = msg, ) is RetryWorkflowTask -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.RETRY_WORKFLOW_TASK, + type = WorkflowStateCmdMessageType.RETRY_WORKFLOW_TASK, retryWorkflowTask = msg, ) is RetryTasks -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.RETRY_TASKS, + type = WorkflowStateCmdMessageType.RETRY_TASKS, retryTasks = msg, ) is CompleteTimers -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.COMPLETE_TIMERS, + type = WorkflowStateCmdMessageType.COMPLETE_TIMERS, completeTimers = msg, ) is CompleteWorkflow -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.COMPLETE_WORKFLOW, + type = WorkflowStateCmdMessageType.COMPLETE_WORKFLOW, completeWorkflow = msg, ) is SendSignal -> WorkflowCmdEnvelope( workflowId = msg.workflowId, - type = WorkflowStateEngineCmdMessageType.SEND_SIGNAL, + type = WorkflowStateCmdMessageType.SEND_SIGNAL, sendSignal = msg, ) } @@ -128,16 +128,16 @@ data class WorkflowCmdEnvelope( val writerSchema = AvroSerDe.currentSchema(serializer()) } - override fun message(): WorkflowStateEngineCmdMessage = when (type) { - WorkflowStateEngineCmdMessageType.DISPATCH_WORKFLOW -> dispatchWorkflow - WorkflowStateEngineCmdMessageType.DISPATCH_METHOD -> dispatchMethod - WorkflowStateEngineCmdMessageType.WAIT_WORKFLOW -> waitWorkflow - WorkflowStateEngineCmdMessageType.CANCEL_WORKFLOW -> cancelWorkflow - WorkflowStateEngineCmdMessageType.RETRY_WORKFLOW_TASK -> retryWorkflowTask - WorkflowStateEngineCmdMessageType.RETRY_TASKS -> retryTasks - WorkflowStateEngineCmdMessageType.COMPLETE_TIMERS -> completeTimers - WorkflowStateEngineCmdMessageType.COMPLETE_WORKFLOW -> completeWorkflow - WorkflowStateEngineCmdMessageType.SEND_SIGNAL -> sendSignal + override fun message(): WorkflowStateCmdMessage = when (type) { + WorkflowStateCmdMessageType.DISPATCH_WORKFLOW -> dispatchWorkflow + WorkflowStateCmdMessageType.DISPATCH_METHOD -> dispatchMethod + WorkflowStateCmdMessageType.WAIT_WORKFLOW -> waitWorkflow + WorkflowStateCmdMessageType.CANCEL_WORKFLOW -> cancelWorkflow + WorkflowStateCmdMessageType.RETRY_WORKFLOW_TASK -> retryWorkflowTask + WorkflowStateCmdMessageType.RETRY_TASKS -> retryTasks + WorkflowStateCmdMessageType.COMPLETE_TIMERS -> completeTimers + WorkflowStateCmdMessageType.COMPLETE_WORKFLOW -> completeWorkflow + 
WorkflowStateCmdMessageType.SEND_SIGNAL -> sendSignal }!! fun toByteArray() = AvroSerDe.writeBinary(this, serializer()) diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelope.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelope.kt index bc7ae3c82..ad9fead79 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelope.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelope.kt @@ -49,7 +49,7 @@ data class WorkflowEventEnvelope( @AvroName("signalDiscarded") private val signalDiscardedEvent: SignalDiscardedEvent? = null, @AvroName("signalReceived") private val signalReceivedEvent: SignalReceivedEvent? = null, - ) : Envelope { + ) : Envelope { init { val noNull = listOfNotNull( workflowCompletedEvent, @@ -73,7 +73,7 @@ data class WorkflowEventEnvelope( } companion object { - fun from(msg: WorkflowStateEngineEventMessage) = when (msg) { + fun from(msg: WorkflowStateEventMessage) = when (msg) { is WorkflowCompletedEvent -> WorkflowEventEnvelope( workflowId = msg.workflowId, @@ -162,7 +162,7 @@ data class WorkflowEventEnvelope( val writerSchema = AvroSerDe.currentSchema(serializer()) } - override fun message(): WorkflowStateEngineEventMessage = when (type) { + override fun message(): WorkflowStateEventMessage = when (type) { WorkflowStateEngineEventMessageType.WORKFLOW_COMPLETED -> workflowCompletedEvent WorkflowStateEngineEventMessageType.WORKFLOW_CANCELED -> workflowCanceledEvent WorkflowStateEngineEventMessageType.METHOD_DISPATCHED -> methodDispatchedEvent diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowMessage.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowMessage.kt index 196c01166..671e011bf 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowMessage.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowMessage.kt @@ -145,7 +145,7 @@ data class RetryWorkflowTask( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage +) : WorkflowMessage(), WorkflowStateCmdMessage /** * This command tells the workflow to retry some tasks. @@ -163,7 +163,7 @@ data class RetryTasks( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage +) : WorkflowMessage(), WorkflowStateCmdMessage /** * This message tells the workflow's method that a new client is waiting for its output @@ -177,7 +177,7 @@ data class WaitWorkflow( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage +) : WorkflowMessage(), WorkflowStateCmdMessage /** * This command dispatches a new workflow. @@ -201,7 +201,7 @@ data class DispatchWorkflow( @Deprecated("Not used since version 0.13.0") val parentMethodRunId: WorkflowMethodId? 
= null, @AvroDefault(Avro.NULL) override var requester: Requester?, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage { +) : WorkflowMessage(), WorkflowStateCmdMessage { init { // this is used only to handle previous messages that are still on <0.13 version @@ -301,7 +301,7 @@ data class DispatchMethod( val clientWaiting: Boolean, override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant? -) : WorkflowMessage(), WorkflowStateEngineCmdMessage, WorkflowEvent { +) : WorkflowMessage(), WorkflowStateCmdMessage, WorkflowEvent { init { // this is used only to handle previous messages that are still on <0.13 version // in topics or in bufferedMessages of a workflow state @@ -348,7 +348,7 @@ data class CompleteTimers( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage, WorkflowEvent +) : WorkflowMessage(), WorkflowStateCmdMessage, WorkflowEvent /** * This command tells the workflow to cancel itself @@ -364,7 +364,7 @@ data class CancelWorkflow( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage, WorkflowEvent +) : WorkflowMessage(), WorkflowStateCmdMessage, WorkflowEvent /** * This command tells the workflow to complete itself @@ -378,7 +378,7 @@ data class CompleteWorkflow( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage, WorkflowEvent +) : WorkflowMessage(), WorkflowStateCmdMessage, WorkflowEvent /** * This command sends a signal to the workflow @@ -395,7 +395,7 @@ data class SendSignal( override val emitterName: EmitterName, @AvroDefault(Avro.NULL) override var emittedAt: MillisInstant?, @AvroDefault(Avro.NULL) override val requester: Requester?, -) : WorkflowMessage(), WorkflowStateEngineCmdMessage, WorkflowEvent +) : WorkflowMessage(), WorkflowStateCmdMessage, WorkflowEvent /** * This event tells the workflow that the method of another workflow is unknown. 
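The renames above are mechanical, but to show the renamed sealed interface in use, here is a hedged sketch of dispatching on it; the type() extension and the subtypes appear elsewhere in this diff, and workflowId is assumed to be exposed by the parent interface, as the envelope code above suggests:

```kotlin
import io.infinitic.common.workflows.engine.messages.DispatchWorkflow
import io.infinitic.common.workflows.engine.messages.SendSignal
import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage
import io.infinitic.common.workflows.engine.messages.type

fun describe(msg: WorkflowStateCmdMessage): String = when (msg) {
    is DispatchWorkflow -> "dispatching a new workflow ${msg.workflowId}"
    is SendSignal -> "sending a signal to workflow ${msg.workflowId}"
    else -> "${msg.type()} for workflow ${msg.workflowId}"
}
```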
@@ -642,7 +642,7 @@ data class WorkflowCompletedEvent( override val workflowVersion: WorkflowVersion?, override val workflowId: WorkflowId, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage +) : WorkflowMessage(), WorkflowStateEventMessage /** * This event tells us that the workflow was canceled @@ -654,7 +654,7 @@ data class WorkflowCanceledEvent( override val workflowVersion: WorkflowVersion?, override val workflowId: WorkflowId, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage +) : WorkflowMessage(), WorkflowStateEventMessage /** * This event tells us that a new method has been commanded on this workflow @@ -671,7 +671,7 @@ data class MethodCommandedEvent( val methodParameterTypes: MethodParameterTypes?, val requester: Requester, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage +) : WorkflowMessage(), WorkflowStateEventMessage /** * This event tells us that a method has completed on this workflow @@ -687,7 +687,7 @@ data class MethodCompletedEvent( override val awaitingRequesters: Set, override val emitterName: EmitterName, val returnValue: MethodReturnValue, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodTerminated { +) : WorkflowMessage(), WorkflowStateEventMessage, MethodTerminated { override fun getEventForAwaitingClients(emitterName: EmitterName) = awaitingRequesters.filterIsInstance().map { requester -> MethodCompleted( @@ -732,7 +732,7 @@ data class MethodFailedEvent( override val awaitingRequesters: Set, override val emitterName: EmitterName, val deferredError: DeferredError -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodTerminated { +) : WorkflowMessage(), WorkflowStateEventMessage, MethodTerminated { override fun getEventForAwaitingClients(emitterName: EmitterName) = awaitingRequesters.filterIsInstance().map { requester -> MethodFailed( @@ -778,7 +778,7 @@ data class MethodCanceledEvent( override val workflowMethodId: WorkflowMethodId, override val awaitingRequesters: Set, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodTerminated { +) : WorkflowMessage(), WorkflowStateEventMessage, MethodTerminated { override fun getEventForAwaitingClients(emitterName: EmitterName) = awaitingRequesters.filterIsInstance().map { requester -> @@ -823,7 +823,7 @@ data class MethodTimedOutEvent( override val workflowMethodId: WorkflowMethodId, override val awaitingRequesters: Set, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodTerminated { +) : WorkflowMessage(), WorkflowStateEventMessage, MethodTerminated { override fun getEventForAwaitingClients(emitterName: EmitterName) = awaitingRequesters.filterIsInstance().map { requester -> MethodTimedOut( @@ -867,7 +867,7 @@ data class TaskDispatchedEvent( override val workflowMethodName: MethodName, override val workflowMethodId: WorkflowMethodId, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodEvent, RemoteTaskEvent { +) : WorkflowMessage(), WorkflowStateEventMessage, MethodEvent, RemoteTaskEvent { override fun taskId() = taskDispatched.taskId override fun serviceName() = taskDispatched.serviceName } @@ -885,7 +885,7 @@ data class RemoteMethodDispatchedEvent( override val workflowMethodName: MethodName, override val workflowMethodId: WorkflowMethodId, override val emitterName: EmitterName, -) : WorkflowMessage(), 
WorkflowStateEngineEventMessage, MethodEvent +) : WorkflowMessage(), WorkflowStateEventMessage, MethodEvent /** * This event tells us that a remote timer was dispatched by this workflow @@ -900,7 +900,7 @@ data class TimerDispatchedEvent( override val workflowMethodName: MethodName, override val workflowMethodId: WorkflowMethodId, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodEvent +) : WorkflowMessage(), WorkflowStateEventMessage, MethodEvent /** * This event tells us that a signal was dispatched by this workflow @@ -915,7 +915,7 @@ data class SignalDispatchedEvent( override val workflowMethodName: MethodName, override val workflowMethodId: WorkflowMethodId, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage, MethodEvent +) : WorkflowMessage(), WorkflowStateEventMessage, MethodEvent /** * This event tells us that a signal was received and discarded @@ -928,7 +928,7 @@ data class SignalDiscardedEvent( override val workflowId: WorkflowId, override val workflowVersion: WorkflowVersion?, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage +) : WorkflowMessage(), WorkflowStateEventMessage /** * This event tells us that a signal was received and used @@ -941,4 +941,4 @@ data class SignalReceivedEvent( override val workflowId: WorkflowId, override val workflowVersion: WorkflowVersion?, override val emitterName: EmitterName, -) : WorkflowMessage(), WorkflowStateEngineEventMessage +) : WorkflowMessage(), WorkflowStateEventMessage diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineCmdMessage.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateCmdMessage.kt similarity index 64% rename from infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineCmdMessage.kt rename to infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateCmdMessage.kt index 5b1a9d350..fc2fe1a30 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineCmdMessage.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateCmdMessage.kt @@ -28,26 +28,26 @@ import io.infinitic.common.requester.Requester import kotlinx.serialization.Serializable @Serializable -sealed interface WorkflowStateEngineCmdMessage : WorkflowStateEngineMessage { +sealed interface WorkflowStateCmdMessage : WorkflowStateEngineMessage { val requester: Requester? 
} -fun WorkflowStateEngineCmdMessage.type(): WorkflowStateEngineCmdMessageType = when (this) { - is CancelWorkflow -> WorkflowStateEngineCmdMessageType.CANCEL_WORKFLOW - is CompleteTimers -> WorkflowStateEngineCmdMessageType.COMPLETE_TIMERS - is CompleteWorkflow -> WorkflowStateEngineCmdMessageType.COMPLETE_WORKFLOW - is DispatchMethod -> WorkflowStateEngineCmdMessageType.DISPATCH_METHOD - is DispatchWorkflow -> WorkflowStateEngineCmdMessageType.DISPATCH_WORKFLOW - is RetryTasks -> WorkflowStateEngineCmdMessageType.RETRY_TASKS - is RetryWorkflowTask -> WorkflowStateEngineCmdMessageType.RETRY_WORKFLOW_TASK - is SendSignal -> WorkflowStateEngineCmdMessageType.SEND_SIGNAL - is WaitWorkflow -> WorkflowStateEngineCmdMessageType.WAIT_WORKFLOW +fun WorkflowStateCmdMessage.type(): WorkflowStateCmdMessageType = when (this) { + is CancelWorkflow -> WorkflowStateCmdMessageType.CANCEL_WORKFLOW + is CompleteTimers -> WorkflowStateCmdMessageType.COMPLETE_TIMERS + is CompleteWorkflow -> WorkflowStateCmdMessageType.COMPLETE_WORKFLOW + is DispatchMethod -> WorkflowStateCmdMessageType.DISPATCH_METHOD + is DispatchWorkflow -> WorkflowStateCmdMessageType.DISPATCH_WORKFLOW + is RetryTasks -> WorkflowStateCmdMessageType.RETRY_TASKS + is RetryWorkflowTask -> WorkflowStateCmdMessageType.RETRY_WORKFLOW_TASK + is SendSignal -> WorkflowStateCmdMessageType.SEND_SIGNAL + is WaitWorkflow -> WorkflowStateCmdMessageType.WAIT_WORKFLOW } @Serializable @AvroNamespace("io.infinitic.workflows.engine") @AvroName("WorkflowCmdMessageType") -enum class WorkflowStateEngineCmdMessageType { +enum class WorkflowStateCmdMessageType { WAIT_WORKFLOW, CANCEL_WORKFLOW, RETRY_WORKFLOW_TASK, diff --git a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineEventMessage.kt b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEventMessage.kt similarity index 95% rename from infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineEventMessage.kt rename to infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEventMessage.kt index 2892d5919..56c972859 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEngineEventMessage.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowStateEventMessage.kt @@ -34,11 +34,11 @@ import io.infinitic.common.workflows.data.workflowMethods.WorkflowMethodId import kotlinx.serialization.Serializable @Serializable -sealed interface WorkflowStateEngineEventMessage : WorkflowMessageInterface { +sealed interface WorkflowStateEventMessage : WorkflowMessageInterface { val workflowVersion: WorkflowVersion? 
} -fun WorkflowStateEngineEventMessage.type(): WorkflowStateEngineEventMessageType = when (this) { +fun WorkflowStateEventMessage.type(): WorkflowStateEngineEventMessageType = when (this) { is WorkflowCompletedEvent -> WorkflowStateEngineEventMessageType.WORKFLOW_COMPLETED is WorkflowCanceledEvent -> WorkflowStateEngineEventMessageType.WORKFLOW_CANCELED is MethodCommandedEvent -> WorkflowStateEngineEventMessageType.METHOD_DISPATCHED diff --git a/infinitic-common/src/main/kotlin/io/infinitic/tasks/Task.kt b/infinitic-common/src/main/kotlin/io/infinitic/tasks/Task.kt index 5ee25f2c2..9f0bb1b83 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/tasks/Task.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/tasks/Task.kt @@ -24,6 +24,7 @@ package io.infinitic.tasks import io.infinitic.clients.InfiniticClientInterface import io.infinitic.common.tasks.executors.errors.ExecutionError +import org.jetbrains.annotations.TestOnly object Task { private val context: ThreadLocal = ThreadLocal.withInitial { null } @@ -32,16 +33,20 @@ object Task { ThreadLocal.withInitial { mapOf() } @JvmStatic - val batchContext: Map get() = _batchContext.get() + fun getContext(taskId: String): TaskContext? = _batchContext.get()[taskId] + @TestOnly @JvmStatic - fun setBatchContext(lc: Map) { - _batchContext.set(lc) + fun setContext(taskId: String, taskContext: TaskContext) { + val batchContext = _batchContext.get().toMutableMap() + batchContext[taskId] = taskContext + _batchContext.set(batchContext) } @JvmStatic - fun hasContext() = context.get() != null + fun getContext(): TaskContext? = context.get() + @TestOnly @JvmStatic fun setContext(c: TaskContext) { context.set(c) @@ -87,6 +92,10 @@ object Task { val retryIndex: Int get() = context.get().retryIndex.toInt() + @JvmStatic + val batchKey: String? + get() = context.get().batchKey + @JvmStatic val tags: Set get() = context.get().tags @@ -95,7 +104,6 @@ object Task { val meta: MutableMap get() = context.get().meta - @JvmStatic val withTimeout: WithTimeout? get() = context.get().withTimeout diff --git a/infinitic-common/src/main/kotlin/io/infinitic/tasks/TaskContext.kt b/infinitic-common/src/main/kotlin/io/infinitic/tasks/TaskContext.kt index e254ef011..ce33710ff 100644 --- a/infinitic-common/src/main/kotlin/io/infinitic/tasks/TaskContext.kt +++ b/infinitic-common/src/main/kotlin/io/infinitic/tasks/TaskContext.kt @@ -45,6 +45,7 @@ interface TaskContext { val retrySequence: TaskRetrySequence val retryIndex: TaskRetryIndex val lastError: ExecutionError? + val batchKey: String? val tags: Set val meta: MutableMap val withTimeout: WithTimeout? 
diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/BatchByTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/BatchByTests.kt index d4bce5b55..067ca3b1f 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/BatchByTests.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/BatchByTests.kt @@ -35,6 +35,7 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.cancel import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.channels.ClosedReceiveChannelException +import kotlinx.coroutines.delay import kotlinx.coroutines.isActive import kotlin.random.Random @@ -84,9 +85,7 @@ internal class BatchByTests : StringSpec( "should be able to batch by max duration, up to scope cancellation" { class SlowConsumer : IntConsumer() { - override fun receiveAsync() = super.receiveAsync().thenApply { - it.also { Thread.sleep(70) } - } + override suspend fun receive() = super.receive().also { delay(70) } } with(logger) { diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerTests.kt deleted file mode 100644 index b3223baa3..000000000 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerTests.kt +++ /dev/null @@ -1,134 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. - * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. 
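The test updates above and below all follow the same pattern now that receive() is suspending. A sketch of a test double combining the two behaviours exercised in these files (latency and an injected failure); IntConsumer and IntMessage are the existing test fixtures:

```kotlin
import kotlinx.coroutines.delay

// Test double built on the IntConsumer fixture: transport behaviour is
// simulated by overriding the now-suspending receive() directly, instead of
// wrapping receiveAsync() futures as before.
class SlowAndFlakyConsumer : IntConsumer() {
    override suspend fun receive() = super.receive().also {
        delay(10)                                       // simulated network latency
        if (it.value == 100) throw Exception("Expected Exception")
    }
}
```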
- * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ -package io.infinitic.common.transport.consumers - -import io.infinitic.common.data.MillisInstant -import io.infinitic.common.fixtures.later -import io.kotest.core.spec.style.StringSpec -import io.kotest.matchers.collections.shouldContainAll -import io.kotest.matchers.collections.shouldNotContain -import io.kotest.matchers.ints.shouldBeGreaterThan -import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual -import io.kotest.matchers.shouldBe -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.cancel - -internal class ProcessorConsumerTests : StringSpec( - { - val consumer = IntConsumer() - - val processor = ProcessorConsumer( - consumer, - ::beforeNegativeAcknowledgement, - ) - - fun getScope() = CoroutineScope(Dispatchers.IO) - - beforeEach { - consumer.reset() - receivedList.clear() - deserializedList.clear() - processedList.clear() - acknowledgedList.clear() - negativeAcknowledgedList.clear() - beforeNegativeAcknowledgedList.clear() - } - - "Processor stops when current scope is canceled, all ongoing messages should be processed" { - val scope = getScope() - later { scope.cancel() } - - with(processor) { - scope.startAsync(3, ::deserialize, ::process).join() - } - receivedList.size shouldBeGreaterThan 0 - acknowledgedList.sorted() shouldBe processedList.sorted() - } - - "An Error during deserialization triggers quitting, but does not prevent finishing current processing" { - suspend fun deserializeWitError(value: IntMessage) = - if (value.value == 10) throw Error("Expected Error") else deserialize(value) - - with(processor) { - getScope().startAsync(3, ::deserializeWitError, ::process).join() - } - - acknowledgedList shouldContainAll (1..9).toList() - acknowledgedList shouldNotContain 10 - acknowledgedList.sorted() shouldBe deserializedList.sorted() - negativeAcknowledgedList shouldBe emptyList() - } - - "An Error during processing triggers quitting, but does not prevent finishing current processing" { - suspend fun processWithError(message: DeserializedIntMessage, publishTime: MillisInstant) = - if (message.value.value == 10) throw Error("Expected Error") - else process(message, publishTime) - - with(processor) { - getScope().startAsync(3, ::deserialize, ::processWithError).join() - } - - acknowledgedList shouldContainAll (1..9).toList() - acknowledgedList shouldNotContain 10 - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldBe emptyList() - } - - "An exception during deserialization triggers negative acknowledgment" { - suspend fun deserializeWitError(value: IntMessage) = when (value.value) { - 10 -> throw Exception("Expected Exception") - 20 -> throw Error("Expected Error") - else -> deserialize(value) - } - - with(processor) { - getScope().startAsync(3, ::deserializeWitError, ::process).join() - } - - acknowledgedList.size shouldBeGreaterThanOrEqual 18 - deserializedList.sorted() shouldBe acknowledgedList.sorted() - negativeAcknowledgedList shouldBe listOf(10) - beforeNegativeAcknowledgedList shouldBe listOf(10) - } - - "An exception during processing triggers negative acknowledgment" { - suspend fun processWithException( - message: DeserializedIntMessage, - publishTime: MillisInstant - ) = when (message.value.value) { - 10 -> throw Exception("Expected Exception") - 20 -> throw Error("Expected Error") - else -> process(message, publishTime) - } - - 
with(processor) { - getScope().startAsync(3, ::deserialize, ::processWithException).join() - } - - processedList.sorted() shouldBe acknowledgedList.sorted() - negativeAcknowledgedList shouldBe listOf(10) - beforeNegativeAcknowledgedList shouldBe listOf(10) - } - }, -) diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerWithBatchTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerWithBatchTests.kt deleted file mode 100644 index 38e57df11..000000000 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/ProcessorConsumerWithBatchTests.kt +++ /dev/null @@ -1,256 +0,0 @@ -/** - * "Commons Clause" License Condition v1.0 - * - * The Software is provided to you by the Licensor under the License, as defined below, subject to - * the following condition. - * - * Without limiting other conditions in the License, the grant of rights under the License will not - * include, and the License does not grant to you, the right to Sell the Software. - * - * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you - * under the License to provide to third parties, for a fee or other consideration (including - * without limitation fees for hosting or consulting/ support services related to the Software), a - * product or service whose value derives, entirely or substantially, from the functionality of the - * Software. Any license notice or attribution required by the License must also include this - * Commons Clause License Condition notice. - * - * Software: Infinitic - * - * License: MIT License (https://opensource.org/licenses/MIT) - * - * Licensor: infinitic.io - */ -package io.infinitic.common.transport.consumers - -import io.infinitic.common.data.MillisInstant -import io.infinitic.common.fixtures.later -import io.infinitic.common.transport.BatchConfig -import io.kotest.core.spec.style.StringSpec -import io.kotest.matchers.collections.shouldContainAll -import io.kotest.matchers.collections.shouldNotContain -import io.kotest.matchers.collections.shouldNotContainAnyOf -import io.kotest.matchers.ints.shouldBeGreaterThan -import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual -import io.kotest.matchers.shouldBe -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.cancel - -internal class ProcessorConsumerWithBatchTests : StringSpec( - { - val consumer = IntConsumer() - - val processor = ProcessorConsumer( - consumer, - ::beforeNegativeAcknowledgement, - ) - - fun getScope() = CoroutineScope(Dispatchers.IO) - - beforeEach { - consumer.reset() - receivedList.clear() - deserializedList.clear() - processedList.clear() - acknowledgedList.clear() - negativeAcknowledgedList.clear() - beforeNegativeAcknowledgedList.clear() - } - - "Processor stops when current scope is canceled, all ongoing messages should be processed" { - val scope = getScope() - later { scope.cancel() } - - with(processor) { - scope.startAsync( - 3, - ::deserialize, - ::process, - ::getBatchingConfig, - ::processBatch, - ).join() - } - receivedList.size shouldBeGreaterThan 0 - acknowledgedList.sorted() shouldBe processedList.sorted() - } - - "An Error during deserialization triggers quitting, but does not prevent finishing current processing" { - suspend fun deserializeWitError(value: IntMessage) = - if (value.value == 10) throw Error("Expected Error") else deserialize(value) - - with(processor) { - getScope().startAsync( - 3, - 
::deserializeWitError, - ::process, - ::getBatchingConfig, - ::processBatch, - ).join() - } - - acknowledgedList shouldContainAll (1..9).toList() - acknowledgedList shouldNotContain 10 - acknowledgedList.sorted() shouldBe deserializedList.sorted() - negativeAcknowledgedList shouldBe emptyList() - } - - "An Error during processing triggers quitting, but does not prevent finishing current processing" { - suspend fun processWithError(message: DeserializedIntMessage, publishTime: MillisInstant) = - if (message.value.value == 12) throw Error("Expected Error") - else process(message, publishTime) - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::processWithError, - ::getBatchingConfig, - ::processBatch, - ).join() - } - - acknowledgedList shouldContainAll (1..11).toList() - acknowledgedList shouldNotContain 12 - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldBe emptyList() - } - - "An Error during getBatchingConfig triggers quitting, but does not prevent finishing current processing" { - fun getBatchingConfigWithError(deserialized: DeserializedIntMessage): BatchConfig? = - if (deserialized.value.value == 10) throw Error("Expected Error") - else getBatchingConfig(deserialized) - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::process, - ::getBatchingConfigWithError, - ::processBatch, - ).join() - } - - acknowledgedList shouldNotContain 10 - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldBe listOf() - } - - "An Error during batch processing triggers quitting, but does not prevent finishing current processing" { - fun processBatchWithError( - batch: List, publishTimes: List - ) = if (batch.map { it.value.value }.contains(13)) throw Error("Expected Error") - else processBatch(batch, publishTimes) - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::process, - ::getBatchingConfig, - ::processBatchWithError, - ).join() - } - - acknowledgedList shouldNotContain List(4) { 3 * it + 1 } - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldBe listOf() - } - - "An Exception during deserialization triggers negative acknowledgment" { - suspend fun deserializeWithException(value: IntMessage) = when (value.value) { - 10 -> throw Exception("Expected Exception") - 20 -> throw Error("Expected Error") - else -> deserialize(value) - } - - with(processor) { - getScope().startAsync( - 3, - ::deserializeWithException, - ::process, - ::getBatchingConfig, - ::processBatch, - ).join() - } - - acknowledgedList.size shouldBeGreaterThanOrEqual 18 - deserializedList.sorted() shouldBe acknowledgedList.sorted() - negativeAcknowledgedList shouldBe listOf(10) - beforeNegativeAcknowledgedList shouldBe listOf(10) - } - - "An Exception during processing triggers negative acknowledgment" { - suspend fun processWithException( - message: DeserializedIntMessage, - publishTime: MillisInstant - ) = when (message.value.value) { - 12 -> throw Exception("Expected Exception") - 21 -> throw Error("Expected Error") - else -> process(message, publishTime) - } - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::processWithException, - ::getBatchingConfig, - ::processBatch, - ).join() - } - - processedList.sorted() shouldBe acknowledgedList.sorted() - negativeAcknowledgedList shouldBe listOf(12) - beforeNegativeAcknowledgedList shouldBe listOf(12) - } - - "An Exception during getBatchingConfig triggers quitting, but does not prevent 
finishing current processing" { - fun getBatchingConfigWithException(deserialized: DeserializedIntMessage): BatchConfig? = - when (deserialized.value.value) { - 10 -> throw Exception("Expected Exception") - 20 -> throw Error("Expected Error") - else -> getBatchingConfig(deserialized) - } - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::process, - ::getBatchingConfigWithException, - ::processBatch, - ).join() - } - - acknowledgedList shouldContainAll (1..9).toList() - acknowledgedList shouldNotContain 10 - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldBe listOf(10) - } - - "An Exception during batch processing triggers quitting, but does not prevent finishing current processing" { - fun processBatchWithException( - batch: List, - publishTimes: List - ) = when { - batch.map { it.value.value }.contains(13) -> throw Exception("Expected Exception") - batch.map { it.value.value }.contains(61) -> throw Error("Expected Error") - else -> processBatch(batch, publishTimes) - } - - with(processor) { - getScope().startAsync( - 3, - ::deserialize, - ::process, - ::getBatchingConfig, - ::processBatchWithException, - ).join() - } - - acknowledgedList shouldNotContainAnyOf List(20) { 3 * it + 1 } - acknowledgedList.sorted() shouldBe processedList.sorted() - negativeAcknowledgedList shouldContainAll List(20) { 3 * it + 1 } - } - }, -) diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartBatchingTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartBatchingTests.kt index ea3598fa9..0cda0880a 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartBatchingTests.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartBatchingTests.kt @@ -34,6 +34,7 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.cancel import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.channels.ClosedReceiveChannelException +import kotlinx.coroutines.delay import kotlinx.coroutines.isActive import kotlinx.coroutines.launch import kotlin.random.Random @@ -76,8 +77,8 @@ internal class StartBatchingTests : StringSpec( "should be able to batch by max duration, up to scope cancellation" { class SlowConsumer : IntConsumer() { - override fun receiveAsync() = super.receiveAsync().thenApply { - it.also { Thread.sleep(70) } + override suspend fun receive() = super.receive().also { + delay(70) } } diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartConsumingTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartConsumingTests.kt index 320b81a28..741716072 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartConsumingTests.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/StartConsumingTests.kt @@ -57,10 +57,8 @@ internal class StartConsumingTests : StringSpec( "Error in receiveAsync should close the channel and cancel the scope" { class ErrorConsumer : IntConsumer() { - override fun receiveAsync() = super.receiveAsync().thenApply { - it.also { - if (it.value == 100) throw Error("Expected Error") - } + override suspend fun receive() = super.receive().also { + if (it.value == 100) throw Error("Expected Error") } } @@ -80,10 +78,8 @@ internal class StartConsumingTests : StringSpec( "Exception in receiveAsync should not close the channel neither cancel the scope" { class 
ExceptionConsumer : IntConsumer() { - override fun receiveAsync() = super.receiveAsync().thenApply { - it.also { - if (it.value == 100) throw Exception("Expected Exception") - } + override suspend fun receive() = super.receive().also { + if (it.value == 100) throw Exception("Expected Exception") } } diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/fakes.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/fakes.kt index 3a303986b..5ff9b8b70 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/fakes.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/fakes.kt @@ -25,22 +25,32 @@ package io.infinitic.common.transport.consumers import io.infinitic.common.data.MillisDuration import io.infinitic.common.data.MillisInstant import io.infinitic.common.transport.BatchConfig +import io.infinitic.common.transport.Topic import io.infinitic.common.transport.TransportConsumer import io.infinitic.common.transport.TransportMessage -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.delay -import kotlinx.coroutines.future.future import java.util.* -import java.util.concurrent.CompletableFuture import java.util.concurrent.atomic.AtomicInteger import kotlin.random.Random -internal data class IntMessage(val value: Int) : TransportMessage { +internal data class IntMessage(val value: Int) : TransportMessage { override val messageId: String = value.toString() - override val redeliveryCount: Int = Random.nextInt(3) override val publishTime: MillisInstant = MillisInstant.now() + + override lateinit var topic: Topic<*> + + override fun deserialize(): DeserializedIntMessage = DeserializedIntMessage(this) + + override suspend fun negativeAcknowledge() { + negativeAcknowledgedList.add(value) + } + + override suspend fun acknowledge() { + acknowledgedList.add(value) + } + + override val hasBeenSentToDeadLetterQueue = negativeAcknowledgedList.contains(value) + override fun toString(): String = value.toString() } @@ -53,56 +63,40 @@ internal val deserializedList = Collections.synchronizedList(mutableListOf( internal val processedList = Collections.synchronizedList(mutableListOf()) internal val acknowledgedList = Collections.synchronizedList(mutableListOf()) internal val negativeAcknowledgedList = Collections.synchronizedList(mutableListOf()) -internal val beforeNegativeAcknowledgedList = Collections.synchronizedList(mutableListOf()) internal open class IntConsumer : TransportConsumer { private val counter = AtomicInteger(0) - private val scope = CoroutineScope(Dispatchers.IO + SupervisorJob()) - fun reset() { counter.set(0) } - override fun receiveAsync() = scope.future { - IntMessage(counter.incrementAndGet()) - .also { receivedList.add(it.value) } - } + override suspend fun receive() = IntMessage(counter.incrementAndGet()) + .also { receivedList.add(it.value) } - override fun negativeAcknowledgeAsync(message: IntMessage): CompletableFuture = - scope.future { - delay(Random.nextLong(5)) - .also { negativeAcknowledgedList.add(message.value) } - } - - override fun acknowledgeAsync(message: IntMessage): CompletableFuture = - scope.future { - delay(Random.nextLong(5)) - .also { acknowledgedList.add(message.value) } - } + override val maxRedeliveryCount = 1 + override val name: String = this.toString() } -internal suspend fun deserialize(value: IntMessage) = DeserializedIntMessage(value).also { - 
println("start deserializing...$value") - delay(Random.nextLong(5)) +internal fun deserialize(message: IntMessage) = DeserializedIntMessage(message).also { + println("start deserializing...$message") deserializedList.add(it.value.value) - println("end deserializing...$value") + println("end deserializing...$message") } - -internal suspend fun process(message: DeserializedIntMessage, publishTime: MillisInstant) { - println("start processing......${message.value.value}") +internal suspend fun process(deserialized: DeserializedIntMessage, publishTime: MillisInstant) { + println("start processing......${deserialized.value.value}") delay(Random.nextLong(100)) - println("end processing......${message.value.value}") - processedList.add(message.value.value) + println("end processing......${deserialized.value.value}") + processedList.add(deserialized.value.value) } internal fun processBatch(batch: List, publishTimes: List) { processedList.addAll(batch.map { it.value.value }) } -internal fun getBatchingConfig(value: DeserializedIntMessage): BatchConfig? { - val i = value.value.value +internal fun batchConfig(deserialized: DeserializedIntMessage): BatchConfig? { + val i = deserialized.value.value return when { (i % 3) == 0 -> null (i % 3) == 1 -> BatchConfig("1", 20, MillisDuration(1000 * 3600 * 50)) @@ -111,10 +105,3 @@ internal fun getBatchingConfig(value: DeserializedIntMessage): BatchConfig? { } } -internal fun beforeNegativeAcknowledgement( - message: IntMessage, - deserialized: DeserializedIntMessage?, - e: Exception -) { - beforeNegativeAcknowledgedList.add(message.value) -} diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncTests.kt new file mode 100644 index 000000000..7cb9bbc49 --- /dev/null +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncTests.kt @@ -0,0 +1,154 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. 
+ * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.common.transport.consumers + +import io.github.oshai.kotlinlogging.KotlinLogging +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.fixtures.later +import io.kotest.core.spec.style.StringSpec +import io.kotest.matchers.collections.shouldContainAll +import io.kotest.matchers.collections.shouldNotContain +import io.kotest.matchers.ints.shouldBeGreaterThan +import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual +import io.kotest.matchers.shouldBe +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.cancel + +internal class ProcessorConsumerTests : StringSpec( + { + val logger = KotlinLogging.logger {} + val consumer = IntConsumer() + + fun getScope() = CoroutineScope(Dispatchers.IO) + + beforeEach { + consumer.reset() + receivedList.clear() + deserializedList.clear() + processedList.clear() + acknowledgedList.clear() + negativeAcknowledgedList.clear() + } + + "Processor stops when current scope is canceled, all ongoing messages should be processed" { + with(logger) { + val scope = getScope() + later { scope.cancel() } + + with(scope) { + consumer + .startAsync(3, ::deserialize, ::process) + .join() + } + receivedList.size shouldBeGreaterThan 0 + processedList.sorted() shouldBe deserializedList.sorted() + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe emptyList() + } + } + + "An Error during deserialization triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun deserializeWitError(value: IntMessage) = + if (value.value == 10) throw Error("Expected Error") else deserialize(value) + + with(getScope()) { + consumer + .startAsync(3, ::deserializeWitError, ::process) + .join() + } + + acknowledgedList shouldContainAll (1..9).toList() + acknowledgedList shouldNotContain 10 + processedList.sorted() shouldBe deserializedList.sorted() + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe emptyList() + } + } + + "An Error during processing triggers quitting, but does not prevent finishing current processing" { + with(logger) { + suspend fun processWithError( + message: DeserializedIntMessage, + publishTime: MillisInstant + ) = if (message.value.value == 10) throw Error("Expected Error") + else process(message, publishTime) + + + with(getScope()) { + consumer + .startAsync(3, ::deserialize, ::processWithError) + .join() + } + + acknowledgedList shouldContainAll (1..9).toList() + acknowledgedList shouldNotContain 10 + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe emptyList() + } + } + + "An exception during deserialization triggers negative acknowledgment" { + with(logger) { + fun deserializeWitError(value: IntMessage) = when (value.value) { + 10 -> throw Exception("Expected Exception") + 20 -> throw Error("Expected Error") + else -> deserialize(value) + } + + with(getScope()) { + consumer + .startAsync(3, ::deserializeWitError, ::process) + .join() + } + + acknowledgedList.size shouldBeGreaterThanOrEqual 18 + deserializedList.sorted() shouldBe acknowledgedList.sorted() + negativeAcknowledgedList shouldBe listOf(10) + } + } + + "An exception during processing triggers negative acknowledgment" { + with(logger) { + suspend fun processWithException( + message: DeserializedIntMessage, + publishTime: 
MillisInstant + ) = when (message.value.value) { + 10 -> throw Exception("Expected Exception") + 20 -> throw Error("Expected Error") + else -> process(message, publishTime) + } + + with(getScope()) { + consumer + .startAsync(3, ::deserialize, ::processWithException) + .join() + } + + processedList.sorted() shouldBe acknowledgedList.sorted() + negativeAcknowledgedList shouldBe listOf(10) + } + } + }, +) diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncWithBatchTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncWithBatchTests.kt new file mode 100644 index 000000000..18ed4e872 --- /dev/null +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/transport/consumers/startAsyncWithBatchTests.kt @@ -0,0 +1,292 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.common.transport.consumers + +import io.github.oshai.kotlinlogging.KotlinLogging +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.fixtures.later +import io.infinitic.common.transport.BatchConfig +import io.kotest.core.spec.style.StringSpec +import io.kotest.matchers.collections.shouldContainAll +import io.kotest.matchers.collections.shouldNotContain +import io.kotest.matchers.collections.shouldNotContainAnyOf +import io.kotest.matchers.ints.shouldBeGreaterThan +import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual +import io.kotest.matchers.shouldBe +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.cancel + +internal class ProcessorConsumerWithBatchTests : StringSpec( + { + val logger = KotlinLogging.logger {} + val consumer = IntConsumer() + + fun getScope() = CoroutineScope(Dispatchers.IO) + + beforeEach { + consumer.reset() + receivedList.clear() + deserializedList.clear() + processedList.clear() + acknowledgedList.clear() + negativeAcknowledgedList.clear() + } + + "Processor stops when current scope is canceled, all ongoing messages should be processed" { + with(logger) { + val scope = getScope() + + later { scope.cancel() } + + with(scope) { + consumer + .startAsync(3, ::deserialize, ::process, null, ::batchConfig, ::processBatch) + .join() + } + receivedList.size shouldBeGreaterThan 0 + acknowledgedList.sorted() shouldBe processedList.sorted() + } + } + + "An Error during deserialization triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun deserializeWitError(value: IntMessage) = + if (value.value == 10) throw 
Error("Expected Error") else deserialize(value) + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserializeWitError, + ::process, + null, + ::batchConfig, + ::processBatch, + ) + .join() + } + + acknowledgedList shouldContainAll (1..9).toList() + acknowledgedList shouldNotContain 10 + acknowledgedList.sorted() shouldBe deserializedList.sorted() + negativeAcknowledgedList shouldBe emptyList() + } + } + + "An Error during processing triggers quitting, but does not prevent finishing current processing" { + with(logger) { + suspend fun processWithError( + message: DeserializedIntMessage, + publishTime: MillisInstant + ) = if (message.value.value == 12) throw Error("Expected Error") + else process(message, publishTime) + + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::processWithError, + null, + ::batchConfig, + ::processBatch, + ) + .join() + } + + acknowledgedList shouldContainAll (1..11).toList() + acknowledgedList shouldNotContain 12 + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe emptyList() + } + } + + "An Error during getBatchingConfig triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun batchConfigWithError(deserialized: DeserializedIntMessage): BatchConfig? = + if (deserialized.value.value == 10) throw Error("Expected Error") + else batchConfig(deserialized) + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::process, + null, + ::batchConfigWithError, + ::processBatch, + ) + .join() + } + + acknowledgedList shouldNotContain 10 + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe listOf() + } + } + + "An Error during batch processing triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun processBatchWithError( + batch: List, publishTimes: List + ) = if (batch.map { it.value.value }.contains(13)) throw Error("Expected Error") + else processBatch(batch, publishTimes) + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::process, + null, + ::batchConfig, + ::processBatchWithError, + ) + .join() + } + + acknowledgedList shouldNotContain List(4) { 3 * it + 1 } + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe listOf() + } + } + + "An Exception during deserialization triggers negative acknowledgment" { + with(logger) { + fun deserializeWithException(value: IntMessage) = when (value.value) { + 10 -> throw Exception("Expected Exception") + 20 -> throw Error("Expected Error") + else -> deserialize(value) + } + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserializeWithException, + ::process, + null, + ::batchConfig, + ::processBatch, + ) + .join() + } + + acknowledgedList.size shouldBeGreaterThanOrEqual 18 + deserializedList.sorted() shouldBe acknowledgedList.sorted() + negativeAcknowledgedList shouldBe listOf(10) + } + } + + "An Exception during processing triggers negative acknowledgment" { + with(logger) { + suspend fun processWithException( + message: DeserializedIntMessage, + publishTime: MillisInstant + ) = when (message.value.value) { + 12 -> throw Exception("Expected Exception") + 21 -> throw Error("Expected Error") + else -> process(message, publishTime) + } + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::processWithException, + null, + ::batchConfig, + ::processBatch, + ) + .join() + } + + processedList.sorted() shouldBe acknowledgedList.sorted() + 
negativeAcknowledgedList shouldBe listOf(12) + } + } + + "An Exception during getBatchingConfig triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun batchConfigWithException(deserialized: DeserializedIntMessage): BatchConfig? = + when (deserialized.value.value) { + 10 -> throw Exception("Expected Exception") + 20 -> throw Error("Expected Error") + else -> batchConfig(deserialized) + } + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::process, + null, + ::batchConfigWithException, + ::processBatch, + ) + .join() + } + + acknowledgedList shouldContainAll (1..9).toList() + acknowledgedList shouldNotContain 10 + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldBe listOf(10) + } + } + + "An Exception during batch processing triggers quitting, but does not prevent finishing current processing" { + with(logger) { + fun processBatchWithException( + batch: List, + publishTimes: List + ) = when { + batch.map { it.value.value }.contains(13) -> throw Exception("Expected Exception") + batch.map { it.value.value }.contains(61) -> throw Error("Expected Error") + else -> processBatch(batch, publishTimes) + } + + with(getScope()) { + consumer + .startAsync( + 3, + ::deserialize, + ::process, + null, + ::batchConfig, + ::processBatchWithException, + ) + .join() + } + + acknowledgedList shouldNotContainAnyOf List(20) { 3 * it + 1 } + acknowledgedList.sorted() shouldBe processedList.sorted() + negativeAcknowledgedList shouldContainAll List(20) { 3 * it + 1 } + } + } + }, +) diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelopeTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelopeTests.kt index d10e722fb..615afe12e 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelopeTests.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowCmdEnvelopeTests.kt @@ -35,7 +35,7 @@ import io.kotest.matchers.shouldBe class WorkflowCmdEnvelopeTests : StringSpec( { - WorkflowStateEngineCmdMessage::class.sealedSubclasses.map { + WorkflowStateCmdMessage::class.sealedSubclasses.map { val msg = TestFactory.random(it) "WorkflowCmdEnvelope: ${msg::class.simpleName} should be have its workflowId as key" { @@ -43,7 +43,7 @@ class WorkflowCmdEnvelopeTests : } } - WorkflowStateEngineCmdMessage::class.sealedSubclasses.map { + WorkflowStateCmdMessage::class.sealedSubclasses.map { val msg = TestFactory.random(it) "WorkflowCmdEnvelope: ${msg::class.simpleName} should be avro-convertible" { diff --git a/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelopeTests.kt b/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelopeTests.kt index 3c3156c3f..9af3a6365 100644 --- a/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelopeTests.kt +++ b/infinitic-common/src/test/kotlin/io/infinitic/common/workflows/engine/messages/WorkflowEventEnvelopeTests.kt @@ -35,7 +35,7 @@ import io.kotest.matchers.shouldBe class WorkflowEventEnvelopeTests : StringSpec( { - WorkflowStateEngineEventMessage::class.sealedSubclasses.map { + WorkflowStateEventMessage::class.sealedSubclasses.map { val msg = TestFactory.random(it) "WorkflowEventEnvelope: ${msg::class.simpleName} should be avro-convertible" { diff --git 
a/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/TestFactory.kt b/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/TestFactory.kt index d6cc656f3..b0961d3f1 100644 --- a/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/TestFactory.kt +++ b/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/TestFactory.kt @@ -36,8 +36,8 @@ import io.infinitic.common.workflows.data.steps.Step import io.infinitic.common.workflows.data.steps.StepStatus import io.infinitic.common.workflows.engine.messages.WorkflowEngineEnvelope import io.infinitic.common.workflows.engine.messages.WorkflowEventEnvelope -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import org.jeasy.random.EasyRandom import org.jeasy.random.EasyRandomParameters import org.jeasy.random.FieldPredicates @@ -86,7 +86,7 @@ object TestFactory { WorkflowEngineEnvelope.from(random(sub)) } .randomize(WorkflowEventEnvelope::class.java) { - val sub = WorkflowStateEngineEventMessage::class.sealedSubclasses.shuffled().first() + val sub = WorkflowStateEventMessage::class.sealedSubclasses.shuffled().first() WorkflowEventEnvelope.from(random(sub)) } .randomize(ServiceEventEnvelope::class.java) { diff --git a/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/utils.kt b/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/utils.kt index c0abb49d4..eb05a37cf 100644 --- a/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/utils.kt +++ b/infinitic-common/src/testFixtures/kotlin/io/infinitic/common/fixtures/utils.kt @@ -22,14 +22,10 @@ */ package io.infinitic.common.fixtures -import io.kotest.assertions.throwables.shouldThrow import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.Dispatchers -import kotlinx.coroutines.cancel import kotlinx.coroutines.delay import kotlinx.coroutines.launch -import kotlinx.coroutines.withContext -import kotlin.coroutines.cancellation.CancellationException fun later(delay: Long = 100L, f: suspend CoroutineScope.() -> Unit) = CoroutineScope(Dispatchers.IO) .launch { @@ -37,24 +33,6 @@ fun later(delay: Long = 100L, f: suspend CoroutineScope.() -> Unit) = CoroutineS f() } -suspend fun runWithContextAndCancel(block: suspend context(CoroutineScope) () -> Unit) { - val scope = CoroutineScope(Dispatchers.IO) - - later { scope.cancel() } - - return block(scope) -} - -suspend fun runAndCancel(block: suspend () -> Unit): CancellationException { - val scope = CoroutineScope(Dispatchers.IO) - - later { scope.cancel() } - - return shouldThrow { - withContext(scope.coroutineContext) { block() } - } -} - /** * Compare two strings representing a version number */ diff --git a/infinitic-common/src/testFixtures/resources/pulsar b/infinitic-common/src/testFixtures/resources/pulsar index 8ffc1ad64..3a8b97174 100644 --- a/infinitic-common/src/testFixtures/resources/pulsar +++ b/infinitic-common/src/testFixtures/resources/pulsar @@ -1 +1 @@ -3.0.6 \ No newline at end of file +3.0.7 \ No newline at end of file diff --git a/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/TaskExecutor.kt b/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/TaskExecutor.kt index 19f74c676..f99c74a4e 100644 --- a/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/TaskExecutor.kt +++ 
b/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/TaskExecutor.kt @@ -22,6 +22,7 @@ */ package io.infinitic.tasks.executor +import io.github.oshai.kotlinlogging.KLogger import io.github.oshai.kotlinlogging.KotlinLogging import io.infinitic.annotations.Delegated import io.infinitic.clients.InfiniticClientInterface @@ -38,6 +39,7 @@ import io.infinitic.common.requester.workflowName import io.infinitic.common.requester.workflowVersion import io.infinitic.common.tasks.data.ServiceName import io.infinitic.common.tasks.data.TaskId +import io.infinitic.common.tasks.data.TaskMeta import io.infinitic.common.tasks.events.messages.TaskCompletedEvent import io.infinitic.common.tasks.events.messages.TaskFailedEvent import io.infinitic.common.tasks.events.messages.TaskRetriedEvent @@ -103,13 +105,30 @@ class TaskExecutor( executeTasks.process() } + context(KLogger) fun getBatchConfig(msg: ServiceExecutorMessage): BatchConfig? = when (msg) { is ExecuteTask -> msg.getBatchConfig() } - private fun ExecuteTask.getBatchConfig(): BatchConfig? = - getInstanceAndMethod().second.getBatchConfig() + context(KLogger) + private fun ExecuteTask.getBatchConfig(): BatchConfig? { + val (instance, method) = getInstanceAndMethod() + // get batch config for method + val methodBatchConfig = method.getBatchConfig() + // get batch key from message meta data + val messageBatchKey = taskMeta[TaskMeta.BATCH_KEY]?.let { String(it) } + // if messageBatchKey is defined, user should add a @batch method + if (methodBatchConfig == null && messageBatchKey != null) { + warn { + "Task $taskId has a batch key $messageBatchKey, but there is " + + "no @Batch method for method (${instance::class.java.name}.${method.name}))" + } + } + return methodBatchConfig?.copy( + batchKey = methodBatchConfig.batchKey + (messageBatchKey?.let { "_$it" } ?: ""), + ) + } private data class TaskData( val instance: Any, @@ -135,7 +154,8 @@ class TaskExecutor( .invoke(instance, batchMethod.getArgs(argsMap)) as Map } - private fun Map.toMetaMap() = map { (k, v) -> TaskId(k) to v.meta }.toMap() + private fun BatchData.toMetaMap(): Map> = + contextMap.mapValues { it.value.meta } private suspend fun List.process() = coroutineScope { // Signal that the tasks have started @@ -156,15 +176,14 @@ class TaskExecutor( when { unknownFromOutput.isNotEmpty() -> sendTaskFailed( - Exception("Unknown keys: ${unknownFromOutput.joinToString()}}"), - Task.batchContext.toMetaMap(), + Exception("Unknown keys: ${unknownFromOutput.joinToString()}}"), batchData.toMetaMap(), ) { "Error in the return values of the @batch ${batchData.batchMethod.batch.name} method return value" } missingFromOutput.isNotEmpty() -> sendTaskFailed( Exception("Missing keys: ${missingFromOutput.joinToString()}}"), - Task.batchContext.toMetaMap(), + batchData.toMetaMap(), ) { "Error in the return values of the @batch ${batchData.batchMethod.batch.name} method return value" } @@ -174,21 +193,25 @@ class TaskExecutor( } } - private suspend fun ExecuteTask.process() = coroutineScope { - // Signal that the task has started - sendTaskStarted() + private suspend fun ExecuteTask.process() { + logDebug { "Start processing $this" } + coroutineScope { + // Signal that the task has started + sendTaskStarted() - // Parse the task data. If parsing fails, return without proceeding - val taskData = parseTask().getOrElse { return@coroutineScope } + // Parse the task data. If parsing fails, return without proceeding + val taskData = parseTask().getOrElse { return@coroutineScope } - // Get the task timeout. 
If this operation fails, return without proceeding - val timeout = getTaskTimeout(taskData).getOrElse { return@coroutineScope } + // Get the task timeout. If this operation fails, return without proceeding + val timeout = getTaskTimeout(taskData).getOrElse { return@coroutineScope } - // Execute the task with the specified timeout. If this operation fails, return without proceeding - val output = executeWithTimeout(taskData, timeout).getOrElse { return@coroutineScope } + // Execute the task with the specified timeout. If this operation fails, return without proceeding + val output = executeWithTimeout(taskData, timeout).getOrElse { return@coroutineScope } - // Signal that the task has completed successfully - sendTaskCompleted(output, taskData) + // Signal that the task has completed successfully + sendTaskCompleted(output, taskData) + } + logTrace { "Ended processing $this" } } private suspend fun List.sendTaskStarted() = coroutineScope { @@ -264,7 +287,7 @@ class TaskExecutor( return try { withTimeout(timeout) { coroutineScope { - Task.setBatchContext(batchData.contextMap.mapKeys { it.toString() }) + batchData.contextMap.map { (k, v) -> Task.setContext(k.toString(), v) } Result.success(batchData.invoke()) } } @@ -275,7 +298,7 @@ class TaskExecutor( handleInvocationTargetException(batchData, e) Result.failure(e) } catch (e: Exception) { - sendTaskFailed(e, Task.batchContext.toMetaMap()) { + sendTaskFailed(e, batchData.toMetaMap()) { "An error occurred while processing batch messages" } Result.failure(e) diff --git a/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/task/TaskContextImpl.kt b/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/task/TaskContextImpl.kt index d1e63b82a..513629c92 100644 --- a/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/task/TaskContextImpl.kt +++ b/infinitic-task-executor/src/main/kotlin/io/infinitic/tasks/executor/task/TaskContextImpl.kt @@ -26,6 +26,7 @@ import io.infinitic.clients.InfiniticClientInterface import io.infinitic.common.data.methods.MethodName import io.infinitic.common.tasks.data.ServiceName import io.infinitic.common.tasks.data.TaskId +import io.infinitic.common.tasks.data.TaskMeta import io.infinitic.common.tasks.data.TaskRetryIndex import io.infinitic.common.tasks.data.TaskRetrySequence import io.infinitic.common.tasks.executors.errors.ExecutionError @@ -52,4 +53,6 @@ data class TaskContextImpl( override val withTimeout: WithTimeout?, override val withRetry: WithRetry?, override val client: InfiniticClientInterface -) : TaskContext +) : TaskContext { + override val batchKey get() = meta[TaskMeta.BATCH_KEY]?.let { String(it) } +} diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchService.kt b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchService.kt index a38d612ee..daa44e3d6 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchService.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchService.kt @@ -27,6 +27,7 @@ package io.infinitic.tests.batches import io.infinitic.annotations.Batch import io.infinitic.annotations.Name import io.infinitic.common.exceptions.thisShouldNotHappen +import io.infinitic.tasks.Task import io.mockk.InternalPlatformDsl.toArray fun main() { @@ -49,6 +50,7 @@ internal interface BatchService { fun foo3(input: Input): Int fun foo4(foo: Int): Input fun foo5(input: Input): Input + fun haveSameKey(i: Int): Boolean } internal class BatchServiceImpl : BatchService { @@ -58,6 +60,7 
@@ internal class BatchServiceImpl : BatchService { override fun foo3(input: Input) = thisShouldNotHappen() override fun foo4(foo: Int) = thisShouldNotHappen() override fun foo5(input: Input) = thisShouldNotHappen() + override fun haveSameKey(i: Int) = thisShouldNotHappen() @Batch(maxMessages = 10, maxSeconds = 1.0) fun foo(list: Map): Map = @@ -78,6 +81,18 @@ internal class BatchServiceImpl : BatchService { @Batch(maxMessages = 10, maxSeconds = 1.0) fun foo5(list: Map): Map = list.mapValues { Input(list.values.sumOf { it.sum() }, it.value.bar) } + + @Batch(maxMessages = 10, maxSeconds = 2.0) + fun haveSameKey(all: Map): Map { + // get batch key for the first element + val batchKeys = all.keys.map { Task.getContext(it)!!.batchKey } + println("batchKeys = $batchKeys") + println("all = $all") + val batchKey = batchKeys.first() + // if all have the same batch keys then this should return Map + val allHaveSameKey = batchKeys.all { it == batchKey } + return all.mapValues { allHaveSameKey } + } } internal data class Input(val foo: Int, val bar: Int) { diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflow.kt b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflow.kt index e6c204739..eb25f78d3 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflow.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflow.kt @@ -24,6 +24,8 @@ package io.infinitic.tests.batches import io.infinitic.annotations.Name import io.infinitic.workflows.Workflow +import io.infinitic.workflows.and +import kotlin.random.Random @Name("batchWorkflow") internal interface BatchWorkflow { @@ -32,16 +34,31 @@ internal interface BatchWorkflow { fun foo3(foo: Int, bar: Int): Int fun foo4(foo: Int, bar: Int): Input fun foo5(foo: Int, bar: Int): Input + fun withKey(n: Int): Boolean } @Suppress("unused") internal class BatchWorkflowImpl : Workflow(), BatchWorkflow { private val batchService = newService(BatchService::class.java) + private val metaFoo: Map = mapOf("batchKey" to "Foo".toByteArray()) + private val metaBar: Map = mapOf("batchKey" to "Bar".toByteArray()) + private val batchServiceWithKeyFoo = newService(BatchService::class.java, null, metaFoo) + private val batchServiceWithKeyBar = newService(BatchService::class.java, null, metaBar) override fun add(value: Int) = batchService.foo(value) override fun foo2(foo: Int, bar: Int) = batchService.foo2(foo, bar) override fun foo3(foo: Int, bar: Int) = batchService.foo3(Input(foo, bar)) override fun foo4(foo: Int, bar: Int) = batchService.foo4(foo) override fun foo5(foo: Int, bar: Int) = batchService.foo5(Input(foo, bar)) + override fun withKey(n: Int): Boolean { + val deferredList = List(n) { + when (inline { Random.nextBoolean() }) { + true -> dispatch(batchServiceWithKeyFoo::haveSameKey, it) + false -> dispatch(batchServiceWithKeyBar::haveSameKey, it) + } + } + // return true if all true + return deferredList.and().await().all { it } + } } diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflowTests.kt b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflowTests.kt index 532c1cc00..5d7bcb2af 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflowTests.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/tests/batches/BatchesWorkflowTests.kt @@ -113,5 +113,9 @@ internal class BatchesWorkflowTests : StringSpec( batchWorkflow.foo5(9, 9) shouldBe Input(foo = 45 * 2, bar = 9) } + + "If Task 
contains a batch key, all batches should have the same key" { + batchWorkflow.withKey(20) shouldBe true + } }, ) diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflow.kt b/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflow.kt index 87b2d2bd7..f12f0323b 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflow.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflow.kt @@ -88,7 +88,7 @@ class TimeoutsWorkflowImpl : Workflow(), TimeoutsWorkflow { interface ITimeoutWorkflow : WithTimeout { - // the workflow method 'withMethodTimeout' has a 100ms timeout + // the workflow method 'withMethodTimeout' has a 1s timeout fun withTimeoutOnMethod(duration: Long): Long override fun getTimeoutSeconds(): Double? = 0.4 diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflowTests.kt b/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflowTests.kt index 03a6df48b..559bb57bb 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflowTests.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/tests/timeouts/TimeoutsWorkflowTests.kt @@ -55,11 +55,11 @@ internal class TimeoutsWorkflowTests : } "Synchronous call of a workflow running for more than its timeout should throw" { - shouldThrow { timeoutsWorkflow.withTimeoutOnMethod(2000) } + shouldThrow { timeoutsWorkflow.withTimeoutOnMethod(3000) } } "Synchronous call of a workflow running for less than its timeout should NOT throw" { - shouldNotThrowAny { timeoutsWorkflow.withTimeoutOnMethod(1) shouldBe 1 } + shouldNotThrowAny { timeoutsWorkflow.withTimeoutOnMethod(10) shouldBe 10 } } "Synchronous call of a child-workflow running for less than its timeout should NOT throw" { @@ -79,8 +79,9 @@ internal class TimeoutsWorkflowTests : } "timeout triggered in a synchronous task should throw" { - val e = - shouldThrow { timeoutsWorkflow.withTimeoutOnTask(2000) } + val e = shouldThrow { + timeoutsWorkflow.withTimeoutOnTask(2000) + } e.deferredException.shouldBeInstanceOf() val cause = e.deferredException as TaskTimedOutException diff --git a/infinitic-tests/src/test/kotlin/io/infinitic/utils/listeners.kt b/infinitic-tests/src/test/kotlin/io/infinitic/utils/listeners.kt index 87060f8b1..414e5816c 100644 --- a/infinitic-tests/src/test/kotlin/io/infinitic/utils/listeners.kt +++ b/infinitic-tests/src/test/kotlin/io/infinitic/utils/listeners.kt @@ -30,8 +30,8 @@ import io.infinitic.cloudEvents.CloudEventListener import java.util.concurrent.ConcurrentHashMap class Listener : CloudEventListener { - override fun onEvent(event: CloudEvent) { - events.add(event) + override fun onEvents(cloudEvents: List) { + cloudEvents.forEach { events.add(it) } } companion object { diff --git a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumer.kt b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumer.kt index 2bd4b6bbf..e93b4e0e9 100644 --- a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumer.kt +++ b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumer.kt @@ -22,7 +22,7 @@ */ package io.infinitic.inMemory -import io.github.oshai.kotlinlogging.KotlinLogging +import io.github.oshai.kotlinlogging.KLogger import io.infinitic.common.data.MillisInstant import io.infinitic.common.messages.Message import 
io.infinitic.common.transport.BatchConfig @@ -31,8 +31,9 @@ import io.infinitic.common.transport.InfiniticConsumer import io.infinitic.common.transport.MainSubscription import io.infinitic.common.transport.Subscription import io.infinitic.common.transport.TransportConsumer +import io.infinitic.common.transport.TransportMessage import io.infinitic.common.transport.acceptDelayed -import io.infinitic.common.transport.consumers.ProcessorConsumer +import io.infinitic.common.transport.consumers.startAsync import io.infinitic.inMemory.channels.DelayedMessage import io.infinitic.inMemory.channels.InMemoryChannels import io.infinitic.inMemory.consumers.InMemoryConsumer @@ -40,9 +41,7 @@ import io.infinitic.inMemory.consumers.InMemoryDelayedConsumer import io.infinitic.inMemory.consumers.InMemoryTransportMessage import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.Job -import kotlinx.coroutines.async import kotlinx.coroutines.channels.Channel -import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.launch class InMemoryInfiniticConsumer( @@ -50,65 +49,76 @@ class InMemoryInfiniticConsumer( private val eventListenerChannels: InMemoryChannels, ) : InfiniticConsumer { - context(CoroutineScope) + context(KLogger) + override suspend fun buildConsumers( + subscription: Subscription, + entity: String, + occurrence: Int? + ): List>> = List(occurrence ?: 1) { + when (subscription.topic.acceptDelayed) { + true -> InMemoryDelayedConsumer(subscription.topic, subscription.getChannelForDelayed(entity)) + false -> InMemoryConsumer(subscription.topic, subscription.getChannel(entity)) + } + } + + context(KLogger) + override suspend fun buildConsumer( + subscription: Subscription, + entity: String, + ): TransportConsumer> = + buildConsumers(subscription, entity, 1).first() + + context(CoroutineScope, KLogger) override suspend fun startAsync( subscription: Subscription, entity: String, concurrency: Int, process: suspend (S, MillisInstant) -> Unit, - beforeDlq: (suspend (S?, Exception) -> Unit)?, + beforeDlq: (suspend (S, Exception) -> Unit)?, batchConfig: (suspend (S) -> BatchConfig?)?, batchProcess: (suspend (List, List) -> Unit)? ): Job { - val loggedDeserialize: suspend (InMemoryTransportMessage) -> S = { message -> - logger.debug { "Deserializing message: ${message.messageId}" } - message.toMessage().also { - logger.trace { "Deserialized message: ${message.messageId}" } + val loggedDeserialize: suspend (TransportMessage) -> S = { message -> + debug { "Deserializing message: ${message.messageId}" } + message.deserialize().also { + trace { "Deserialized message: ${message.messageId}" } } } - val loggedHandler: suspend (S, MillisInstant) -> Unit = { message, publishTime -> - logger.debug { "Processing $message" } + val loggedProcess: suspend (S, MillisInstant) -> Unit = { message, publishTime -> + debug { "Processing $message" } process(message, publishTime) - logger.trace { "Processed $message" } - } - - fun buildConsumer(index: Int? 
= null): TransportConsumer> { - logger.debug { "Creating consumer ${index?.let { "${it + 1} " } ?: ""}on ${subscription.topic} for $entity " } - return when (subscription.topic.acceptDelayed) { - true -> InMemoryDelayedConsumer(subscription.getChannelForDelayed(entity)) - false -> InMemoryConsumer(subscription.getChannel(entity)) - } + trace { "Processed $message" } } return when (subscription.withKey) { true -> { - // build the consumers synchronously (but in parallel) - val consumers: List>> = coroutineScope { - List(concurrency) { async { buildConsumer(it) } }.map { it.await() } - } + // build the consumers synchronously + val consumers = buildConsumers(subscription, entity, concurrency) launch { - repeat(concurrency) { - val processor = ProcessorConsumer, S>(consumers[it], null) - with(processor) { startAsync(1, loggedDeserialize, loggedHandler) } + repeat(concurrency) { index -> + consumers[index].startAsync( + 1, + loggedDeserialize, + loggedProcess, + beforeDlq, + ) } } } false -> { // build the consumer synchronously - val consumer = buildConsumer() - val processor = ProcessorConsumer, S>(consumer, null) - with(processor) { - startAsync( - 1, - loggedDeserialize, - loggedHandler, - batchConfig, - batchProcess, - ) - } + val consumer = buildConsumer(subscription, entity) + consumer.startAsync( + concurrency, + loggedDeserialize, + loggedProcess, + beforeDlq, + batchConfig, + batchProcess, + ) } } } @@ -125,8 +135,5 @@ class InMemoryInfiniticConsumer( is EventListenerSubscription -> with(eventListenerChannels) { topic.channel(entity) } } - companion object { - private val logger = KotlinLogging.logger {} - } } diff --git a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/channels/InMemoryChannels.kt b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/channels/InMemoryChannels.kt index 7e3c5b2bb..05ccc5a65 100644 --- a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/channels/InMemoryChannels.kt +++ b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/channels/InMemoryChannels.kt @@ -43,10 +43,11 @@ import io.infinitic.common.transport.WorkflowStateEngineTopic import io.infinitic.common.transport.WorkflowStateEventTopic import io.infinitic.common.transport.WorkflowStateTimerTopic import io.infinitic.common.transport.WorkflowTagEngineTopic -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEngineMessage import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.channels.Channel.Factory.UNLIMITED import java.util.concurrent.ConcurrentHashMap class InMemoryChannels { @@ -67,7 +68,7 @@ class InMemoryChannels { internal val workflowStateEngineChannels = ConcurrentHashMap>() internal val workflowStateEventChannels = - ConcurrentHashMap>() + ConcurrentHashMap>() internal val workflowExecutorChannels = ConcurrentHashMap>() internal val workflowExecutorEventChannels = @@ -107,7 +108,7 @@ class InMemoryChannels { } as Channel> } - private fun newChannel(): () -> Channel = { Channel(10000) } + private fun newChannel(): () -> Channel = { Channel(UNLIMITED) } } internal val Channel<*>.id diff --git a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryConsumer.kt 
b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryConsumer.kt index fd54fe93b..2fc759f84 100644 --- a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryConsumer.kt +++ b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryConsumer.kt @@ -23,46 +23,49 @@ package io.infinitic.inMemory.consumers import io.infinitic.common.messages.Message +import io.infinitic.common.transport.Topic import io.infinitic.common.transport.TransportConsumer import io.infinitic.inMemory.channels.DelayedMessage -import kotlinx.coroutines.CoroutineScope -import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.channels.Channel import kotlinx.coroutines.delay -import kotlinx.coroutines.future.future -import java.util.concurrent.CompletableFuture +/** + * An in-memory implementation of a transport consumer that consumes messages from a Kotlin Coroutine [Channel]. + * + * @param S The type of message being consumed, which must extend the [Message] interface. + * @property channel The channel from which messages are consumed. + */ class InMemoryConsumer( + private val topic: Topic, private val channel: Channel ) : TransportConsumer> { - val scope = CoroutineScope(Dispatchers.IO) - override fun receiveAsync(): CompletableFuture> = scope.future { - InMemoryTransportMessage(channel.receive()) - } + override suspend fun receive() = InMemoryTransportMessage(channel.receive(), topic) - override fun negativeAcknowledgeAsync(message: InMemoryTransportMessage): CompletableFuture = - scope.future {} + override val maxRedeliveryCount = 1 - override fun acknowledgeAsync(message: InMemoryTransportMessage): CompletableFuture = - scope.future {} + override val name: String = toString() } +/** + * An implementation of [TransportConsumer] that receives [InMemoryTransportMessage] + * instances from a Kotlin [Channel] containing delayed messages. + * + * @param S The type of the messages being consumed, which must implement [Message]. + * @param channel A channel to receive delayed messages from. 
+ */ class InMemoryDelayedConsumer( + private val topic: Topic, private val channel: Channel> ) : TransportConsumer> { - val scope = CoroutineScope(Dispatchers.IO) - override fun receiveAsync(): CompletableFuture> = scope.future { - channel.receive().let { message -> - delay(message.after.millis) - InMemoryTransportMessage(message.message) - } + override suspend fun receive(): InMemoryTransportMessage { + val message = channel.receive() + delay(message.after.millis) + return InMemoryTransportMessage(message.message, topic) } - override fun negativeAcknowledgeAsync(message: InMemoryTransportMessage): CompletableFuture = - scope.future {} + override val maxRedeliveryCount = 1 - override fun acknowledgeAsync(message: InMemoryTransportMessage): CompletableFuture = - scope.future {} + override val name: String = toString() } diff --git a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryTransportMessage.kt b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryTransportMessage.kt index b802aa2fe..0c5700e39 100644 --- a/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryTransportMessage.kt +++ b/infinitic-transport-inMemory/src/main/kotlin/io/infinitic/inMemory/consumers/InMemoryTransportMessage.kt @@ -24,12 +24,25 @@ package io.infinitic.inMemory.consumers import io.infinitic.common.data.MillisInstant import io.infinitic.common.messages.Message +import io.infinitic.common.transport.Topic +import io.infinitic.common.transport.TransportMessage + +class InMemoryTransportMessage(private val message: S, override val topic: Topic) : + TransportMessage { + private var hasBeenNegativelyAcknowledged = false -class InMemoryTransportMessage(private val message: S) : - io.infinitic.common.transport.TransportMessage { override val messageId: String = message.messageId.toString() - override val redeliveryCount: Int = 0 override val publishTime: MillisInstant = MillisInstant.now() - internal fun toMessage() = message -} + override fun deserialize() = message + + override suspend fun negativeAcknowledge() { + hasBeenNegativelyAcknowledged = true + } + + override suspend fun acknowledge() { + // nothing to do + } + + override val hasBeenSentToDeadLetterQueue: Boolean = hasBeenNegativelyAcknowledged +} diff --git a/infinitic-transport-inMemory/src/test/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumerTests.kt b/infinitic-transport-inMemory/src/test/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumerTests.kt new file mode 100644 index 000000000..1a565cfde --- /dev/null +++ b/infinitic-transport-inMemory/src/test/kotlin/io/infinitic/inMemory/InMemoryInfiniticConsumerTests.kt @@ -0,0 +1,90 @@ +/** + * "Commons Clause" License Condition v1.0 + * + * The Software is provided to you by the Licensor under the License, as defined below, subject to + * the following condition. + * + * Without limiting other conditions in the License, the grant of rights under the License will not + * include, and the License does not grant to you, the right to Sell the Software. + * + * For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you + * under the License to provide to third parties, for a fee or other consideration (including + * without limitation fees for hosting or consulting/ support services related to the Software), a + * product or service whose value derives, entirely or substantially, from the functionality of the + * Software. 
Any license notice or attribution required by the License must also include this + * Commons Clause License Condition notice. + * + * Software: Infinitic + * + * License: MIT License (https://opensource.org/licenses/MIT) + * + * Licensor: infinitic.io + */ +package io.infinitic.inMemory + +import io.github.oshai.kotlinlogging.KotlinLogging +import io.infinitic.common.data.MillisInstant +import io.infinitic.common.fixtures.TestFactory +import io.infinitic.common.tasks.data.ServiceName +import io.infinitic.common.tasks.data.TaskId +import io.infinitic.common.tasks.executors.messages.ExecuteTask +import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage +import io.infinitic.common.transport.MainSubscription +import io.infinitic.common.transport.ServiceExecutorTopic +import io.infinitic.inMemory.channels.InMemoryChannels +import io.kotest.core.spec.style.StringSpec +import io.kotest.matchers.longs.shouldBeLessThan +import io.kotest.matchers.shouldBe +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.cancel +import kotlinx.coroutines.delay +import java.util.concurrent.atomic.AtomicInteger +import kotlin.system.measureTimeMillis + +class InMemoryInfiniticConsumerTests : StringSpec( + { + val logger = KotlinLogging.logger {} + val serviceName = ServiceName("ServiceTest") + val mainChannels = InMemoryChannels() + val eventListenerChannels = InMemoryChannels() + + val consumer = InMemoryInfiniticConsumer(mainChannels, eventListenerChannels) + val producer = InMemoryInfiniticProducer(mainChannels, eventListenerChannels) + + val counter = AtomicInteger(0) + + fun process(msg: ServiceExecutorMessage, publishTime: MillisInstant) { + println(counter.incrementAndGet()) + Thread.sleep(1000) + } + + val executeTask = TestFactory.random().copy(serviceName = serviceName) + + "Tasks should be processed in parallel" { + with(logger) { + val scope = CoroutineScope(Dispatchers.IO) + + val job = with(scope) { + consumer.startAsync( + MainSubscription(ServiceExecutorTopic), + serviceName.toString(), + 10, + ::process, + null, + ) + } + + repeat(10) { + val m = executeTask.copy(taskId = TaskId()) + with(producer) { m.sendTo(ServiceExecutorTopic) } + } + delay(100) + scope.cancel() + val duration = measureTimeMillis { job.join() } + duration shouldBeLessThan 1200L + counter.get() shouldBe 10 + } + } + }, +) diff --git a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumer.kt b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumer.kt index 65cb3f68a..0921fe573 100644 --- a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumer.kt +++ b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumer.kt @@ -22,7 +22,7 @@ */ package io.infinitic.pulsar -import io.github.oshai.kotlinlogging.KotlinLogging +import io.github.oshai.kotlinlogging.KLogger import io.infinitic.common.data.MillisInstant import io.infinitic.common.messages.Envelope import io.infinitic.common.messages.Message @@ -31,11 +31,11 @@ import io.infinitic.common.transport.EventListenerSubscription import io.infinitic.common.transport.InfiniticConsumer import io.infinitic.common.transport.MainSubscription import io.infinitic.common.transport.Subscription -import io.infinitic.common.transport.consumers.ProcessorConsumer +import io.infinitic.common.transport.TransportMessage +import io.infinitic.common.transport.consumers.startAsync import 
io.infinitic.pulsar.client.InfiniticPulsarClient import io.infinitic.pulsar.config.PulsarConsumerConfig -import io.infinitic.pulsar.consumers.PulsarConsumer -import io.infinitic.pulsar.consumers.PulsarTransportMessage +import io.infinitic.pulsar.consumers.PulsarTransportConsumer import io.infinitic.pulsar.resources.PulsarResources import io.infinitic.pulsar.resources.defaultName import io.infinitic.pulsar.resources.defaultNameDLQ @@ -56,103 +56,134 @@ class PulsarInfiniticConsumer( private val pulsarResources: PulsarResources, ) : InfiniticConsumer { - context(CoroutineScope) + context(KLogger) + override suspend fun buildConsumers( + subscription: Subscription, + entity: String, + occurrence: Int? + ): List> { + // Retrieve the name of the topic and of the DLQ topic + // Create them if they do not exist. + val (topicName, topicDLQName) = getOrCreateTopics(subscription, entity) + + return coroutineScope { + List(occurrence ?: 1) { index -> + async { + val consumerName = entity + (occurrence?.let { "-${index + 1}" } ?: "") + debug { "Creating consumer '${consumerName}' for $topicName" } + getConsumer( + schema = subscription.topic.schema, + topic = topicName, + topicDlq = topicDLQName, + subscriptionName = subscription.name, + subscriptionNameDlq = subscription.nameDLQ, + subscriptionType = subscription.type, + consumerName = consumerName, + ).onSuccess { + trace { "Consumer '${consumerName}' created for $topicName" } + } + } + }.map { deferred -> + deferred.await() + .getOrThrow() // failed synchronously + .let { + PulsarTransportConsumer( + subscription.topic, + it, + pulsarConsumerConfig.getMaxRedeliverCount(), + ) + } + } + } + } + + context(KLogger) + override suspend fun buildConsumer( + subscription: Subscription, + entity: String, + ): PulsarTransportConsumer = buildConsumers(subscription, entity, null).first() + + context(CoroutineScope, KLogger) override suspend fun startAsync( subscription: Subscription, entity: String, concurrency: Int, process: suspend (S, MillisInstant) -> Unit, - beforeDlq: (suspend (S?, Exception) -> Unit)?, + beforeDlq: (suspend (S, Exception) -> Unit)?, batchConfig: (suspend (S) -> BatchConfig?)?, batchProcess: (suspend (List, List) -> Unit)? ): Job { - // Retrieve the name of the topic and of the DLQ topic - // Create them if they do not exist. 
- val (topicName, topicDLQName) = coroutineScope { - val deferredTopic = async { - with(pulsarResources) { - subscription.topic.forEntity(entity, true, checkConsumer = false) - } - } - val deferredTopicDLQ = async { - with(pulsarResources) { - subscription.topic.forEntityDLQ(entity, true) - } - } - Pair(deferredTopic.await(), deferredTopicDLQ.await()) - } - - val loggedDeserialize: suspend (PulsarTransportMessage>) -> S = { message -> - logger.debug { "Deserializing message: ${message.messageId}" } - message.toPulsarMessage().value.message().also { - logger.trace { "Deserialized message: ${message.messageId}" } + val loggedDeserialize: suspend (TransportMessage) -> S = { message -> + debug { "Deserializing message: ${message.messageId}" } + message.deserialize().also { + trace { "Deserialized message: ${message.messageId}" } } } val loggedHandler: suspend (S, MillisInstant) -> Unit = { message, publishTime -> - logger.debug { "Processing $message" } + debug { "Processing $message" } process(message, publishTime) - logger.trace { "Processed $message" } - } - - val beforeNegativeAcknowledgement: suspend (PulsarTransportMessage>, S?, Exception) -> Unit = - { message, deserialized, cause -> - if (message.redeliveryCount == pulsarConsumerConfig.maxRedeliverCount) { - beforeDlq?.let { - logger.debug { "Processing beforeNegativeAcknowledgement for ${deserialized ?: message.messageId}" } - it(deserialized, cause) - logger.trace { "Processed beforeNegativeAcknowledgement for ${deserialized ?: message.messageId}" } - } - } - } - - fun buildConsumer(index: Int? = null): PulsarConsumer> { - logger.debug { "Creating consumer ${index?.let { "${it + 1} " } ?: ""}for $topicName" } - return getConsumer( - schema = subscription.topic.schema, - topic = topicName, - topicDlq = topicDLQName, - subscriptionName = subscription.name, - subscriptionNameDlq = subscription.nameDLQ, - subscriptionType = subscription.type, - consumerName = entity + (index?.let { "-$it" } ?: ""), - ).getOrThrow().let { PulsarConsumer(it) }.also { - logger.trace { "Consumer created ${index?.let { "${it + 1} " } ?: ""}for $topicName" } - } + trace { "Processed $message" } } return when (subscription.withKey) { true -> { - // build the consumers synchronously (but in parallel) - val consumers = coroutineScope { - List(concurrency) { async { buildConsumer(it) } }.map { it.await() } - } + // multiple consumers with unique processing + val consumers = buildConsumers(subscription, entity, concurrency) launch { - List(concurrency) { index -> - val processor = ProcessorConsumer(consumers[index], beforeNegativeAcknowledgement) - with(processor) { startAsync(1, loggedDeserialize, loggedHandler) } + repeat(concurrency) { index -> + consumers[index].startAsync( + concurrency = 1, + loggedDeserialize, + loggedHandler, + beforeDlq, + ) } } } false -> { - // build the unique consumer synchronously - val consumer = buildConsumer() - val processor = ProcessorConsumer(consumer, beforeNegativeAcknowledgement) - with(processor) { - startAsync( - concurrency, - loggedDeserialize, - loggedHandler, - batchConfig, - batchProcess, - ) - } + // unique consumer with parallel processing + val consumer: PulsarTransportConsumer = buildConsumer(subscription, entity) + consumer.startAsync( + concurrency, + loggedDeserialize, + loggedHandler, + beforeDlq, + batchConfig, + batchProcess, + ) } } } + /** + * Retrieves the name of the topic and the DLQ topic for a given entity. + * The topics are created if they do not exist. + * + * @param M The type of the message. 
+ * @param subscription The subscription containing topic information. + * @param entity The entity for which the topic names are to be retrieved. + * @return A pair containing the topic name and the DLQ topic name. + */ + private suspend fun getOrCreateTopics( + subscription: Subscription, + entity: String, + ): Pair = coroutineScope { + val deferredTopic = async { + with(pulsarResources) { + subscription.topic.forEntity(entity, true, checkConsumer = false) + } + } + val deferredTopicDLQ = async { + with(pulsarResources) { + subscription.topic.forEntityDLQ(entity, true) + } + } + Pair(deferredTopic.await(), deferredTopicDLQ.await()) + } + private val Subscription<*>.name get() = when (this) { is MainSubscription -> defaultName @@ -165,7 +196,7 @@ class PulsarInfiniticConsumer( is EventListenerSubscription -> name?.let { "$it-dlq" } ?: defaultNameDLQ } - private fun > getConsumer( + private fun , M : Message> getConsumer( schema: Schema, topic: String, topicDlq: String?, @@ -193,9 +224,5 @@ class PulsarInfiniticConsumer( return client.newConsumer(schema, consumerDef, consumerDefDlq) } - - companion object { - val logger = KotlinLogging.logger {} - } } diff --git a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/client/InfiniticPulsarClient.kt b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/client/InfiniticPulsarClient.kt index 068cbb2b9..16bba6d41 100644 --- a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/client/InfiniticPulsarClient.kt +++ b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/client/InfiniticPulsarClient.kt @@ -224,6 +224,8 @@ class InfiniticPulsarClient(private val pulsarClient: PulsarClient) { consumerDefDlq: ConsumerDef? = null, ): Result> { + logger.info { "Creating consumer with $consumerDef" } + val (topic, subscriptionName, subscriptionType, diff --git a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarConsumer.kt b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportConsumer.kt similarity index 60% rename from infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarConsumer.kt rename to infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportConsumer.kt index d1e5757aa..ee8e99715 100644 --- a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarConsumer.kt +++ b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportConsumer.kt @@ -22,21 +22,24 @@ */ package io.infinitic.pulsar.consumers +import io.infinitic.common.messages.Envelope +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.Topic +import io.infinitic.common.transport.TransportConsumer +import kotlinx.coroutines.future.await import org.apache.pulsar.client.api.Consumer -import java.util.concurrent.CompletableFuture -class PulsarConsumer( - private val pulsarConsumer: Consumer -) : io.infinitic.common.transport.TransportConsumer> { - override fun receiveAsync(): CompletableFuture> = - pulsarConsumer.receiveAsync().thenApply { PulsarTransportMessage(it) } +class PulsarTransportConsumer( + private val topic: Topic, + private val pulsarConsumer: Consumer>, + override val maxRedeliveryCount: Int +) : TransportConsumer> { - override fun negativeAcknowledgeAsync(message: PulsarTransportMessage): CompletableFuture { - pulsarConsumer.negativeAcknowledge(message.toPulsarMessage()) - return CompletableFuture.completedFuture(Unit) + override suspend 
fun receive(): PulsarTransportMessage { + val pulsarMessage = pulsarConsumer.receiveAsync().await() + + return PulsarTransportMessage(pulsarMessage, pulsarConsumer, topic, maxRedeliveryCount) } - override fun acknowledgeAsync(message: PulsarTransportMessage): CompletableFuture = - pulsarConsumer.acknowledgeAsync(message.toPulsarMessage()).thenApply { } + override val name: String = pulsarConsumer.consumerName } - diff --git a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportMessage.kt b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportMessage.kt index 1feccd698..d10c62544 100644 --- a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportMessage.kt +++ b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/consumers/PulsarTransportMessage.kt @@ -23,21 +23,55 @@ package io.infinitic.pulsar.consumers import io.infinitic.common.data.MillisInstant +import io.infinitic.common.messages.Envelope +import io.infinitic.common.messages.Message +import io.infinitic.common.transport.Topic import io.infinitic.common.transport.TransportMessage -import org.apache.pulsar.client.api.Messages +import kotlinx.coroutines.future.await +import org.apache.pulsar.client.api.Consumer as PulsarConsumer import org.apache.pulsar.client.api.Message as PulsarMessage -class PulsarTransportMessage(private val pulsarMessage: PulsarMessage) : TransportMessage { - override val messageId: String = pulsarMessage.messageId.toString() - override val redeliveryCount: Int = pulsarMessage.redeliveryCount - override val publishTime: MillisInstant = MillisInstant(pulsarMessage.publishTime) - internal fun toPulsarMessage() = pulsarMessage -} +class PulsarTransportMessage( + private val pulsarMessage: PulsarMessage>, + private val pulsarConsumer: PulsarConsumer>, + override val topic: Topic, + maxRedeliveryCount: Int +) : TransportMessage { + override val publishTime = MillisInstant(pulsarMessage.publishTime) + override val messageId = pulsarMessage.messageId.toString() -internal class PulsarMessages(val messages: List>) : Messages { - override fun iterator() = messages.toMutableList().iterator() - override fun size() = messages.size -} + /** + * Deserializes the underlying Pulsar message payload back into its original form. + * + * @return The deserialized message of type M. + */ + override fun deserialize(): M = pulsarMessage.value.message() + + /** + * Acknowledges the message, suspending until the Pulsar broker confirms the acknowledgment. + * This informs the Pulsar consumer that the message has been successfully processed + * and can be removed from the subscription. + */ + override suspend fun acknowledge() { + pulsarConsumer.acknowledgeAsync(pulsarMessage).await() + } -internal fun List>.toPulsarMessages() = - PulsarMessages(map { it.toPulsarMessage() }) + /** + * Negatively acknowledges the message, indicating to the Pulsar consumer + * that the message could not be processed successfully and should be redelivered. + * + * This method invokes the `negativeAcknowledge` method on the underlying Pulsar consumer, which handles the logic + * for message redelivery based on the consumer's configuration and the message's redelivery count. + */ + override suspend fun negativeAcknowledge() { + pulsarConsumer.negativeAcknowledge(pulsarMessage) + } + + /** + * Indicates whether the message has been sent to the Dead Letter Queue (DLQ). 
+ * + * This property is `true` if the number of redelivery attempts for the Pulsar message has reached + * the maximum redelivery count allowed, as defined by `maxRedeliveryCount`. + */ + override val hasBeenSentToDeadLetterQueue = (maxRedeliveryCount == pulsarMessage.redeliveryCount) +} diff --git a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/resources/Topics.kt b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/resources/Topics.kt index 0d4d86358..09fc12014 100644 --- a/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/resources/Topics.kt +++ b/infinitic-transport-pulsar/src/main/kotlin/io/infinitic/pulsar/resources/Topics.kt @@ -53,9 +53,9 @@ import io.infinitic.common.transport.WorkflowTopic import io.infinitic.common.workflows.engine.messages.WorkflowCmdEnvelope import io.infinitic.common.workflows.engine.messages.WorkflowEngineEnvelope import io.infinitic.common.workflows.engine.messages.WorkflowEventEnvelope -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEngineMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEnvelope import io.infinitic.pulsar.schemas.schemaDefinition @@ -105,7 +105,7 @@ internal fun Topic<*>.nameDLQ(entity: String) = * @param S The type of the message contained in the topic. * @return The schema of the topic. */ -internal val Topic.schema: Schema> +internal val Topic.schema: Schema> get() = Schema.AVRO(schemaDefinition(envelopeClass)) @@ -159,7 +159,7 @@ internal fun getWorkflowNameFromTopicName(topicName: String): String? { * @return The envelope class that is associated with the topic. 
*/ @Suppress("UNCHECKED_CAST") -internal val Topic.envelopeClass: KClass> +internal val Topic.envelopeClass: KClass> get() = when (this) { NamingTopic -> thisShouldNotHappen() ClientTopic -> ClientEnvelope::class @@ -172,7 +172,7 @@ internal val Topic.envelopeClass: KClass> ServiceTagEngineTopic -> ServiceTagEnvelope::class ServiceExecutorTopic, ServiceExecutorRetryTopic -> ServiceExecutorEnvelope::class ServiceExecutorEventTopic -> ServiceEventEnvelope::class - } as KClass> + } as KClass> @Suppress("UNCHECKED_CAST") internal fun Topic.envelope(message: S) = @@ -180,9 +180,9 @@ internal fun Topic.envelope(message: S) = NamingTopic -> thisShouldNotHappen() ClientTopic -> ClientEnvelope.from(message as ClientMessage) WorkflowTagEngineTopic -> WorkflowTagEnvelope.from(message as WorkflowTagEngineMessage) - WorkflowStateCmdTopic -> WorkflowCmdEnvelope.from(message as WorkflowStateEngineCmdMessage) + WorkflowStateCmdTopic -> WorkflowCmdEnvelope.from(message as WorkflowStateCmdMessage) WorkflowStateEngineTopic, WorkflowStateTimerTopic -> WorkflowEngineEnvelope.from(message as WorkflowStateEngineMessage) - WorkflowStateEventTopic -> WorkflowEventEnvelope.from(message as WorkflowStateEngineEventMessage) + WorkflowStateEventTopic -> WorkflowEventEnvelope.from(message as WorkflowStateEventMessage) WorkflowExecutorTopic, WorkflowExecutorRetryTopic -> ServiceExecutorEnvelope.from(message as ServiceExecutorMessage) diff --git a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumerTests.kt b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumerTests.kt index 13d29d510..cf3675e09 100644 --- a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumerTests.kt +++ b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticConsumerTests.kt @@ -23,8 +23,9 @@ package io.infinitic.pulsar +import io.github.oshai.kotlinlogging.KotlinLogging import io.infinitic.common.clients.data.ClientName -import io.infinitic.common.fixtures.runWithContextAndCancel +import io.infinitic.common.fixtures.later import io.infinitic.common.messages.Envelope import io.infinitic.common.messages.Message import io.infinitic.common.tasks.data.ServiceName @@ -51,13 +52,19 @@ import io.mockk.coVerify import io.mockk.every import io.mockk.mockk import io.mockk.spyk +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.cancel +import kotlinx.coroutines.delay import net.bytebuddy.utility.RandomString -import org.apache.pulsar.client.api.Consumer import org.apache.pulsar.client.api.Schema import java.util.concurrent.CompletableFuture +import org.apache.pulsar.client.api.Consumer as PulsarConsumer +import org.apache.pulsar.client.api.Message as PulsarMessage class PulsarInfiniticConsumerTests : StringSpec( { + val logger = KotlinLogging.logger {} val clientName = ClientName("clientTest") val workflowName = WorkflowName("workflowTest") val serviceName = ServiceName("serviceTest") @@ -80,8 +87,15 @@ class PulsarInfiniticConsumerTests : StringSpec( coEvery { initDlqTopicOnce(any(), any(), any()) } returns Result.success(Unit) } - val pulsarConsumer = mockk>> { - every { receiveAsync() } returns CompletableFuture>>() + val pulsarConsumer = mockk>> { + // delay here is to avoid the main loop in startConsuming to loop too quickly, + // creating a huge amount of CompletableFuture and Mockk objects, + // eventually leading to memory issues + coEvery { receiveAsync() } coAnswers { + 
delay(10) + CompletableFuture>>() + } + every { consumerName } returns "consumerName" } val client = mockk { @@ -89,249 +103,277 @@ class PulsarInfiniticConsumerTests : StringSpec( Result.success(pulsarConsumer) } - val infiniticConsumer = - PulsarInfiniticConsumer(client, pulsarConfig.consumer, pulsarResources) - - "should init client-response topic before consuming it" { - val name = "$clientName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(ClientTopic), - entity = name, - concurrency = 1, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } + val pulsarInfiniticConsumer = PulsarInfiniticConsumer( + client, pulsarConfig.consumer, pulsarResources, + ) + fun getScope(): CoroutineScope { + val scope = CoroutineScope(Dispatchers.IO) + later(500) { scope.cancel() } + return scope + } - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/response:$name", - isPartitioned = false, - isTimer = false, - ) + "should init client-response topic before consuming it" { + with(logger) { + val name = "$clientName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(ClientTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/response:$name", + isPartitioned = false, + isTimer = false, + ) + } } } "should init workflow-tag topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowTagEngineTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/workflow-tag:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowTagEngineTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-tag:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init workflow-cmd topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowStateCmdTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/workflow-cmd:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowStateCmdTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-cmd:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init workflow-engine topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowStateEngineTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - 
"persistent://$tenant/$namespace/workflow-engine:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowStateEngineTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-engine:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init workflow-delay topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowStateTimerTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/workflow-delay:$name", - isPartitioned = true, - isTimer = true, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowStateTimerTopic), + entity = name, + concurrency = 10, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-delay:$name", + isPartitioned = true, + isTimer = true, + ) + } } } "should init workflow-events topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowStateEventTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/workflow-events:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowStateEventTopic), + entity = name, + concurrency = 10, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-events:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init workflow-task-executor topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowExecutorTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/workflow-task-executor:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowExecutorTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-task-executor:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init workflow-task-events topic before consuming it" { - val name = "$workflowName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(WorkflowExecutorEventTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - 
"persistent://$tenant/$namespace/workflow-task-events:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$workflowName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(WorkflowExecutorEventTopic), + entity = name, + concurrency = 10, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/workflow-task-events:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init task-tag topic before consuming it" { - val name = "$serviceName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(ServiceTagEngineTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/task-tag:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$serviceName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(ServiceTagEngineTopic), + entity = name, + concurrency = 1, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/task-tag:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init task-executor topic before consuming it" { - val name = "$serviceName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(ServiceExecutorTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/task-executor:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$serviceName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(ServiceExecutorTopic), + entity = name, + concurrency = 10, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/task-executor:$name", + isPartitioned = true, + isTimer = false, + ) + } } } "should init task-events topic before consuming it" { - val name = "$serviceName" - - runWithContextAndCancel { - infiniticConsumer.start( - subscription = MainSubscription(ServiceExecutorEventTopic), - entity = name, - concurrency = 10, - process = { _, _ -> }, - beforeDlq = { _, _ -> }, - ) - } - - coVerify { - pulsarResources.initTopicOnce( - "persistent://$tenant/$namespace/task-events:$name", - isPartitioned = true, - isTimer = false, - ) + with(logger) { + val name = "$serviceName" + + with(getScope()) { + pulsarInfiniticConsumer.start( + subscription = MainSubscription(ServiceExecutorEventTopic), + entity = name, + concurrency = 10, + process = { _, _ -> }, + beforeDlq = { _, _ -> }, + ) + } + + coVerify { + pulsarResources.initTopicOnce( + "persistent://$tenant/$namespace/task-events:$name", + isPartitioned = true, + isTimer = false, + ) + } } } }, diff --git a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticProducerTests.kt b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticProducerTests.kt index 0b487d42e..40ed43189 100644 --- a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticProducerTests.kt +++ 
b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/PulsarInfiniticProducerTests.kt @@ -45,9 +45,9 @@ import io.infinitic.common.transport.WorkflowStateEventTopic import io.infinitic.common.transport.WorkflowStateTimerTopic import io.infinitic.common.transport.WorkflowTagEngineTopic import io.infinitic.common.workflows.data.workflowTasks.WorkflowTask -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateCmdMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEngineMessage import io.infinitic.pulsar.admin.InfiniticPulsarAdmin import io.infinitic.pulsar.client.InfiniticPulsarClient @@ -133,7 +133,7 @@ class PulsarInfiniticProducerTests : StringSpec( } "publishing to an absent WorkflowCmdTopic should not throw, should create the topic" { - val message = TestFactory.random() + val message = TestFactory.random() // publishing to an absent WorkflowCmdTopic should not throw shouldNotThrowAny { @@ -173,7 +173,7 @@ class PulsarInfiniticProducerTests : StringSpec( } "publishing to an absent WorkflowEventTopic should not throw, should create the topic" { - val message = TestFactory.random() + val message = TestFactory.random() // publishing to an absent WorkflowEventsTopic should not throw diff --git a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/consumers/ConsumerTests.kt b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/consumers/ConsumerTests.kt index ba327a5d7..12de57ac0 100644 --- a/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/consumers/ConsumerTests.kt +++ b/infinitic-transport-pulsar/src/test/kotlin/io/infinitic/pulsar/consumers/ConsumerTests.kt @@ -23,6 +23,7 @@ package io.infinitic.pulsar.consumers +import io.github.oshai.kotlinlogging.KotlinLogging import io.infinitic.common.data.MillisDuration import io.infinitic.common.data.MillisInstant import io.infinitic.common.fixtures.DockerOnly @@ -54,7 +55,6 @@ import kotlinx.coroutines.cancel import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.delay import kotlinx.coroutines.launch -import kotlinx.coroutines.withContext import net.bytebuddy.utility.RandomString import java.time.Duration import java.time.Instant @@ -63,6 +63,7 @@ import java.util.concurrent.atomic.AtomicInteger @EnabledIf(DockerOnly::class) class ConsumerTests : StringSpec( { + val logger = KotlinLogging.logger("test") val pulsarConfig = pulsarConfigTest!! 
val resources = pulsarConfig.pulsarResources @@ -99,7 +100,7 @@ class ConsumerTests : StringSpec( is ExecuteTask -> message.copy(taskId = TaskId()) else -> message } as S - producer.internalSendTo(message = m, topic = topic, after = zero) + with(producer) { m.sendTo(topic, zero) } } } } @@ -114,250 +115,190 @@ class ConsumerTests : StringSpec( } "consuming 1000 messages (1ms) without concurrency should take less than 5 ms in average" { - val entity = RandomString(10).nextString() - val message = TestFactory.random(mapOf("serviceName" to ServiceName(entity))) - val topic = ServiceExecutorTopic - val total = 1000 - - later { - // send $total messages - sendMessages(topic, message, total) - } + with(logger) { + val entity = RandomString(10).nextString() + + val message = TestFactory.random( + mapOf("serviceName" to ServiceName(entity)), + ) + val topic = ServiceExecutorTopic + val total = 1000 + + later { + // send $total messages + sendMessages(topic, message, total) + } - val subscription = MainSubscription(topic) - var averageMillisToConsume = 100.0 - - try { - withContext(CoroutineScope(Dispatchers.IO).coroutineContext) { - val counter = AtomicInteger(0) - lateinit var start: Instant - - val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { _, _ -> - if (counter.get() == 0) start = Instant.now() - // emulate a 1ms task - delay(1) - // increment counter - counter.incrementAndGet().let { - if (it == total) { - averageMillisToConsume = (start.fromNow() / total) - println("Average time to consume a message: $averageMillisToConsume ms") - // delete current scope - cancel() - } + val subscription = MainSubscription(topic) + var averageMillisToConsume = 100.0 + + val scope = CoroutineScope(Dispatchers.IO) + val counter = AtomicInteger(0) + lateinit var start: Instant + + val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { _, _ -> + if (counter.get() == 0) start = Instant.now() + // emulate a 1ms task + delay(1) + // increment counter + counter.incrementAndGet().let { + if (it == total) { + averageMillisToConsume = (start.fromNow() / total) + println("Average time to consume a message: $averageMillisToConsume ms") + scope.cancel() } } + } - consumer.start(subscription, entity, 1, handler, null) + try { + with(scope) { + consumer.start(subscription, entity, 1, handler, null) + } + } catch (e: CancellationException) { + // do nothing } - } catch (e: CancellationException) { - // do nothing + averageMillisToConsume shouldBeLessThan 5.0 } - averageMillisToConsume shouldBeLessThan 5.0 } "consuming 1000 messages (100ms) with 100 concurrency should take less than 5 ms in average" { - val entity = RandomString(10).nextString() - val message = TestFactory.random( - mapOf("serviceName" to ServiceName(entity)), - ) - val topic = ServiceExecutorTopic - val total = 1000 - - later { - // send $total messages - sendMessages(topic, message, total) - } + with(logger) { + val entity = RandomString(10).nextString() + + val message = TestFactory.random( + mapOf("serviceName" to ServiceName(entity)), + ) + val topic = ServiceExecutorTopic + val total = 1000 + + later { + // send $total messages + sendMessages(topic, message, total) + } - val subscription = MainSubscription(topic) - var averageMillisToConsume = 100.0 - - try { - withContext(CoroutineScope(Dispatchers.IO).coroutineContext) { - val counter = AtomicInteger(0) - lateinit var start: Instant - - val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { _, _ -> - if (counter.get() == 0) start = 
Instant.now() - // emulate a 100ms task - delay(100) - // increment counter - counter.incrementAndGet().let { - if (it == total) { - averageMillisToConsume = (start.fromNow() / total) - println("Average time to consume a message: $averageMillisToConsume ms") - // delete current scope - cancel() - } + val subscription = MainSubscription(topic) + var averageMillisToConsume = 100.0 + + val counter = AtomicInteger(0) + lateinit var start: Instant + val scope = CoroutineScope(Dispatchers.IO) + + val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { _, _ -> + if (counter.get() == 0) start = Instant.now() + // emulate a 100ms task + delay(100) + // increment counter + counter.incrementAndGet().let { + if (it == total) { + averageMillisToConsume = (start.fromNow() / total) + println("Average time to consume a message: $averageMillisToConsume ms") + // delete current scope + scope.cancel() } } + } - consumer.start(subscription, entity, 100, handler, null) + try { + with(scope) { + consumer.start(subscription, entity, 100, handler, null) + } + } catch (e: CancellationException) { + // do nothing } - } catch (e: CancellationException) { - // do nothing + averageMillisToConsume shouldBeLessThan 5.0 } - averageMillisToConsume shouldBeLessThan 5.0 } "consuming 1000 messages (1ms) with 1 concurrency (key-shared) should take less than 5 ms in average" { - val entity = RandomString(10).nextString() - val message = TestFactory.random( - mapOf("workflowName" to WorkflowName(entity)), - ) - val topic = WorkflowStateEngineTopic - val total = 1000 - - later { - // send messages + with(logger) { + val entity = RandomString(10).nextString() + + val message = TestFactory.random( + mapOf("workflowName" to WorkflowName(entity)), + ) + val topic = WorkflowStateEngineTopic + val total = 1000 + + // send $total messages sendMessages(topic, message, total) - } - val subscription = MainSubscription(topic) - var averageMillisToConsume = 100.0 - - try { - withContext(CoroutineScope(Dispatchers.IO).coroutineContext) { - val counter = AtomicInteger(0) - lateinit var start: Instant - - val handler: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { _, _ -> - if (counter.get() == 0) start = Instant.now() - // emulate a 1ms task - delay(1) - // increment counter - counter.incrementAndGet().let { - if (it == total) { - averageMillisToConsume = (start.fromNow() / total) - println("Average time to consume a message: $averageMillisToConsume ms") - // delete current scope - cancel() - } + val subscription = MainSubscription(topic) + var averageMillisToConsume = 100.0 + + val scope = CoroutineScope(Dispatchers.IO) + val counter = AtomicInteger(0) + lateinit var start: Instant + + val handler: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { _, _ -> + if (counter.get() == 0) start = Instant.now() + // emulate a 1ms task + delay(1) + // increment counter + counter.incrementAndGet().let { + if (it == total) { + averageMillisToConsume = (start.fromNow() / total) + println("Average time to consume a message: $averageMillisToConsume ms") + scope.cancel() } } + } - consumer.start(subscription, entity, 1, handler, null) + try { + with(scope) { + consumer.start(subscription, entity, 1, handler, null) + } + } catch (e: CancellationException) { + // do nothing } - } catch (e: CancellationException) { - // do nothing + averageMillisToConsume shouldBeLessThan 5.0 } - - averageMillisToConsume shouldBeLessThan 5.0 } "consuming 1000 messages (100ms) with 100 concurrency (key-shared) should take less 
than 5 ms in average" { - val entity = RandomString(10).nextString() - val message = TestFactory.random( - mapOf("workflowName" to WorkflowName(entity)), - ) - val topic = WorkflowStateEngineTopic - val total = 1000 - - later { - // send messages - sendMessages(topic, message, total) - } - - val subscription = MainSubscription(topic) - var averageMillisToConsume = 100.0 - - try { - withContext(CoroutineScope(Dispatchers.IO).coroutineContext) { - val counter = AtomicInteger(0) - lateinit var start: Instant - - val handler: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { _, _ -> - if (counter.get() == 0) start = Instant.now() - // emulate a 100ms task - delay(100) - // increment counter - counter.incrementAndGet().let { - if (it == total) { - averageMillisToConsume = (start.fromNow() / total) - println("Average time to consume a message: $averageMillisToConsume ms") - // delete current scope - cancel() - } + with(logger) { + val entity = RandomString(10).nextString() + + val message = TestFactory.random( + mapOf("workflowName" to WorkflowName(entity)), + ) + val topic = WorkflowStateEngineTopic + val total = 1000 + + val subscription = MainSubscription(topic) + var averageMillisToConsume = 100.0 + + val scope = CoroutineScope(Dispatchers.IO) + val counter = AtomicInteger(0) + lateinit var start: Instant + + val process: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { _, _ -> + if (counter.get() == 0) start = Instant.now() + // emulate a 100 ms task + delay(100) + // increment counter + counter.incrementAndGet().let { + if (it == total) { + averageMillisToConsume = (start.fromNow() / total) + println("Average time to consume a message: $averageMillisToConsume ms") + scope.cancel() } } + } - consumer.start(subscription, entity, 100, handler, null) + try { + val job = with(scope) { + consumer.startAsync(subscription, entity, 100, process, null) + } + // on the consumer created, we send the messages + // to avoid that the first consumer up captures all keys right-away + sendMessages(topic, message, total) + // wait for the cancellation triggered when reaching total in process + job.join() + } catch (e: CancellationException) { + // do nothing } - } catch (e: CancellationException) { - // do nothing + averageMillisToConsume shouldBeLessThan 5.0 } - - averageMillisToConsume shouldBeLessThan 5.0 } - -// -// "graceful shutdown with Shared" { -// val consumer = ConsumerFactory(client, PulsarConsumerConfig()) -// val topic = RandomString(10).nextString() -// val counter = AtomicInteger(0) -// val messageOpen = CopyOnWriteArrayList() -// val messageClosed = CopyOnWriteArrayList() -// val total = 1000 -// -// val scope = getScope() -// -// val handler: ((ServiceExecutorMessage, MillisInstant) -> Unit) = { _, _ -> -// counter.incrementAndGet().let { -// // begin of task -// messageOpen.add(it) -// // emulate a 100ms task -// Thread.sleep(100) -// // enf of task -// messageClosed.add(it) -// } -// } -// // start consumers -// scope.startAsync(consumer, handler, topic, 100) -// // send messages -// sendMessage(topic, total) -// // cancel after 0.4s -// later(400) { scope.cancel() } -// // wait for scope cancellation -// scope.coroutineContext.job.join() -// -// // for the test to be meaningful, all messages should not have been processed -// messageOpen.count().shouldBeLessThan(total) -// messageClosed.count().shouldBeLessThan(total) -// messageOpen.count().shouldBeExactly(messageClosed.count()) -// } -// -// "graceful shutdown with Key-Shared" { -// val consumer = 
ConsumerFactory(client, PulsarConsumerConfig()) -// val topic = RandomString(10).nextString() -// val counter = AtomicInteger(0) -// val messageOpen = CopyOnWriteArrayList() -// val messageClosed = CopyOnWriteArrayList() -// val total = 1000 -// -// val scope = getScope() -// -// val handler: ((ServiceExecutorMessage, MillisInstant) -> Unit) = { _, _ -> -// counter.incrementAndGet().let { -// // begin of task -// messageOpen.add(it) -// // emulate a 100ms task -// Thread.sleep(100) -// // enf of task -// messageClosed.add(it) -// } -// } -// // start consumers -// scope.startAsync(consumer, handler, topic, 100, true) -// // send messages -// sendMessage(topic, total, true) -// // cancel after 1s -// later(1000) { scope.cancel() } -// // wait for scope cancellation -// scope.coroutineContext.job.join() -// -// // for the test to be meaningful, all messages should not have been processed -// messageOpen.count().shouldBeLessThan(total) -// messageClosed.count().shouldBeLessThan(total) -// messageOpen.count().shouldBeExactly(messageClosed.count()) -// } }, ) diff --git a/infinitic-transport-pulsar/src/test/resources/simplelogger.properties b/infinitic-transport-pulsar/src/test/resources/simplelogger.properties index 4a7874fe7..79542251a 100644 --- a/infinitic-transport-pulsar/src/test/resources/simplelogger.properties +++ b/infinitic-transport-pulsar/src/test/resources/simplelogger.properties @@ -14,9 +14,10 @@ org.slf4j.simpleLogger.log.io.infinitic.workers.InfiniticWorker=info org.slf4j.simpleLogger.log.io.infinitic.workflows.engine.WorkflowEngine=info org.slf4j.simpleLogger.log.io.infinitic.pulsar.producers.Producer=info org.slf4j.simpleLogger.log.io.infinitic.pulsar.consumers.Consumer=info -org.slf4j.simpleLogger.log.io.infinitic.pulsar.admin.PulsarInfiniticAdmin=info -org.slf4j.simpleLogger.log.io.infinitic.pulsar.client.PulsarInfiniticClient=info +org.slf4j.simpleLogger.log.io.infinitic.pulsar.admin.InfiniticPulsarAdmin=info +org.slf4j.simpleLogger.log.io.infinitic.pulsar.client.InfiniticPulsarClient=info org.slf4j.simpleLogger.log.io.infinitic.pulsar.config.PulsarConfig=info +org.slf4j.simpleLogger.log.test=info #org.slf4j.simpleLogger.log.io.infinitic.workflows.engine.storage.LoggedWorkflowStateStorage=warn # Set to true if you want the current date and time to be included in output messages. # Default is false, and will output the number of milliseconds elapsed since startup. 
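The consumer changes in the files above converge on a small suspending contract: a transport consumer exposes receive(), and each transport message carries deserialize(), acknowledge(), negativeAcknowledge() and hasBeenSentToDeadLetterQueue, replacing the previous CompletableFuture-based receiveAsync/acknowledgeAsync calls. What follows is a minimal, self-contained sketch of a receive, process, then acknowledge loop built on that shape; SketchTransportMessage, SketchTransportConsumer and consumeLoop are simplified stand-ins introduced here for illustration only, not the actual io.infinitic.common.transport types.

import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.channels.Channel
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.launch

// Simplified stand-ins for the suspending transport contract shown in the diff (illustrative only).
interface SketchTransportMessage<M> {
  fun deserialize(): M
  suspend fun acknowledge()
  suspend fun negativeAcknowledge()
}

interface SketchTransportConsumer<M, T : SketchTransportMessage<M>> {
  suspend fun receive(): T
}

// Receives messages and processes them with a fixed number of parallel workers,
// acknowledging on success and negatively acknowledging on failure.
// Runs until the surrounding coroutine scope is cancelled, like the startAsync jobs in the tests above.
suspend fun <M, T : SketchTransportMessage<M>> consumeLoop(
  consumer: SketchTransportConsumer<M, T>,
  concurrency: Int,
  process: suspend (M) -> Unit,
): Unit = coroutineScope {
  val buffer = Channel<T>()
  // a single receiver feeds the workers
  launch { while (true) buffer.send(consumer.receive()) }
  repeat(concurrency) {
    launch {
      for (message in buffer) {
        try {
          process(message.deserialize())
          message.acknowledge()
        } catch (e: CancellationException) {
          throw e
        } catch (e: Exception) {
          message.negativeAcknowledge()
        }
      }
    }
  }
}

The real implementations above additionally expose maxRedeliveryCount and hasBeenSentToDeadLetterQueue, which presumably drive the beforeDlq callback passed to startAsync; that branch is left out of the sketch.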
diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorker.kt b/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorker.kt index debb8b9b8..2e74cf8ad 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorker.kt +++ b/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorker.kt @@ -22,18 +22,11 @@ */ package io.infinitic.workers -import io.cloudevents.CloudEvent import io.github.oshai.kotlinlogging.KLogger import io.github.oshai.kotlinlogging.KotlinLogging import io.infinitic.clients.InfiniticClient -import io.infinitic.cloudEvents.logs.LOGS_SERVICE_EXECUTOR -import io.infinitic.cloudEvents.logs.LOGS_SERVICE_TAG_ENGINE -import io.infinitic.cloudEvents.logs.LOGS_WORKFLOW_EXECUTOR -import io.infinitic.cloudEvents.logs.LOGS_WORKFLOW_STATE_ENGINE -import io.infinitic.cloudEvents.logs.LOGS_WORKFLOW_TAG_ENGINE import io.infinitic.common.data.MillisInstant import io.infinitic.common.messages.Message -import io.infinitic.common.tasks.data.ServiceName import io.infinitic.common.tasks.events.messages.ServiceExecutorEventMessage import io.infinitic.common.tasks.executors.messages.ExecuteTask import io.infinitic.common.tasks.executors.messages.ServiceExecutorMessage @@ -43,7 +36,6 @@ import io.infinitic.common.transport.ServiceExecutorEventTopic import io.infinitic.common.transport.ServiceExecutorRetryTopic import io.infinitic.common.transport.ServiceExecutorTopic import io.infinitic.common.transport.ServiceTagEngineTopic -import io.infinitic.common.transport.SubscriptionType import io.infinitic.common.transport.WorkflowExecutorEventTopic import io.infinitic.common.transport.WorkflowExecutorRetryTopic import io.infinitic.common.transport.WorkflowExecutorTopic @@ -52,21 +44,14 @@ import io.infinitic.common.transport.WorkflowStateEngineTopic import io.infinitic.common.transport.WorkflowStateEventTopic import io.infinitic.common.transport.WorkflowStateTimerTopic import io.infinitic.common.transport.WorkflowTagEngineTopic -import io.infinitic.common.transport.create -import io.infinitic.common.transport.logged.LoggedInfiniticConsumer import io.infinitic.common.transport.logged.LoggedInfiniticProducer -import io.infinitic.common.transport.logged.LoggedInfiniticResources -import io.infinitic.common.workflows.data.workflows.WorkflowName import io.infinitic.common.workflows.emptyWorkflowContext -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineCmdMessage -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import io.infinitic.common.workflows.tags.messages.WorkflowTagEngineMessage -import io.infinitic.events.EventListener -import io.infinitic.events.toJsonString -import io.infinitic.events.toServiceCloudEvent -import io.infinitic.events.toWorkflowCloudEvent -import io.infinitic.logger.ignoreNull +import io.infinitic.events.CloudEventLogger +import io.infinitic.events.config.EventListenerConfig +import io.infinitic.events.listeners.startCloudEventListener import io.infinitic.tasks.Task import io.infinitic.tasks.WithTimeout import io.infinitic.tasks.executor.TaskEventHandler @@ -75,7 +60,6 @@ import io.infinitic.tasks.executor.TaskRetryHandler import io.infinitic.tasks.tag.TaskTagEngine import io.infinitic.tasks.tag.storage.LoggedTaskTagStorage import io.infinitic.workers.config.ConfigGetterInterface -import 
io.infinitic.workers.config.EventListenerConfig import io.infinitic.workers.config.InfiniticWorkerConfig import io.infinitic.workers.config.InfiniticWorkerConfigInterface import io.infinitic.workers.config.ServiceConfig @@ -100,11 +84,9 @@ import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Job import kotlinx.coroutines.TimeoutCancellationException import kotlinx.coroutines.cancel -import kotlinx.coroutines.delay import kotlinx.coroutines.future.future import kotlinx.coroutines.job import kotlinx.coroutines.joinAll -import kotlinx.coroutines.launch import kotlinx.coroutines.runBlocking import kotlinx.coroutines.withTimeout import java.util.concurrent.CompletableFuture @@ -125,10 +107,6 @@ class InfiniticWorker( /** Coroutine scope used to launch consumers and await their termination */ private lateinit var scope: CoroutineScope - init { - Runtime.getRuntime().addShutdownHook(Thread { close() }) - } - override fun close() { if (isStarted.compareAndSet(true, false)) runBlocking { logger.info { "Closing worker..." } @@ -257,6 +235,8 @@ class InfiniticWorker( */ fun startAsync(): CompletableFuture { if (isStarted.compareAndSet(false, true)) { + // Add close hook + Runtime.getRuntime().addShutdownHook(Thread { close() }) // create a new scope scope = CoroutineScope(Dispatchers.IO) @@ -266,24 +246,36 @@ class InfiniticWorker( val jobs = mutableListOf() config.services.forEach { serviceConfig -> - logger.info { "Service ${serviceConfig.name}:" } - // Start SERVICE TAG ENGINE - serviceConfig.tagEngine?.let { jobs.add(startServiceTagEngine(it)) } - // Start SERVICE EXECUTOR - serviceConfig.executor?.let { jobs.addAll(startServiceExecutor(it)) } + with(logger) { + info { "Service ${serviceConfig.name}:" } + // Start SERVICE TAG ENGINE + serviceConfig.tagEngine?.let { jobs.add(startServiceTagEngine(it)) } + // Start SERVICE EXECUTOR + serviceConfig.executor?.let { jobs.addAll(startServiceExecutor(it)) } + } } config.workflows.forEach { workflowConfig -> - logger.info { "Workflow ${workflowConfig.name}:" } - // Start WORKFLOW TAG ENGINE - workflowConfig.tagEngine?.let { jobs.add(startWorkflowTagEngine(it)) } - // Start WORKFLOW STATE ENGINE - workflowConfig.stateEngine?.let { jobs.addAll(startWorkflowStateEngine(it)) } - // Start WORKFLOW EXECUTOR - workflowConfig.executor?.let { jobs.addAll(startWorkflowExecutor(it)) } + with(logger) { + info { "Workflow ${workflowConfig.name}:" } + // Start WORKFLOW TAG ENGINE + workflowConfig.tagEngine?.let { jobs.add(startWorkflowTagEngine(it)) } + // Start WORKFLOW STATE ENGINE + workflowConfig.stateEngine?.let { jobs.addAll(startWorkflowStateEngine(it)) } + // Start WORKFLOW EXECUTOR + workflowConfig.executor?.let { jobs.addAll(startWorkflowExecutor(it)) } + } } - config.eventListener?.let { jobs.addAll(startEventListener(it)) } + config.eventListener?.let { + logEventListenerStart(it) + + with(logger) { + jobs.add( + consumer.startCloudEventListener(resources, it, cloudEventSourcePrefix), + ) + } + } val workerName = producer.getName() @@ -369,30 +361,31 @@ class InfiniticWorker( private val sendingMessageToDLQ = { "Unable to process message, sending to Dead Letter Queue" } + context(CoroutineScope) private suspend fun startServiceTagEngine(config: ServiceTagEngineConfig): Job { // Log Service Tag Engine configuration logServiceTagEngineStart(config) - val logsEventLogger = KotlinLogging.logger( - "$LOGS_SERVICE_TAG_ENGINE.${config.serviceName}", - ).ignoreNull() - // TASK-TAG - val logger = TaskTagEngine.logger - val loggedStorage = 
LoggedTaskTagStorage(logger, config.serviceTagStorage) - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - - val taskTagEngine = TaskTagEngine(loggedStorage, loggedProducer) + return with(TaskTagEngine.logger) { + val loggedStorage = LoggedTaskTagStorage(this, config.serviceTagStorage) + val loggedProducer = LoggedInfiniticProducer(this, producer) + val taskTagEngine = TaskTagEngine(loggedStorage, loggedProducer) + + val cloudEventLogger = CloudEventLogger( + ServiceTagEngineTopic, + config.serviceName, + cloudEventSourcePrefix, + beautifyLogs, + ) - val handler: suspend (ServiceTagMessage, MillisInstant) -> Unit = - { message, publishedAt -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) - taskTagEngine.handle(message, publishedAt) - } + val handler: suspend (ServiceTagMessage, MillisInstant) -> Unit = + { message, publishedAt -> + cloudEventLogger.log(message, publishedAt) + taskTagEngine.handle(message, publishedAt) + } - return with(scope) { - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(ServiceTagEngineTopic), entity = config.serviceName, concurrency = config.concurrency, @@ -402,67 +395,65 @@ class InfiniticWorker( } } + context(CoroutineScope) private suspend fun startServiceExecutor(config: ServiceExecutorConfig): List { // Log Service Executor configuration logServiceExecutorStart(config) - val logsEventLogger = KotlinLogging.logger( - "$LOGS_SERVICE_EXECUTOR.${config.serviceName}", - ).ignoreNull() - // init batch methods for current factory config.initBatchMethods() // TASK-EXECUTOR - val jobExecutor = with(scope) { - val logger = TaskExecutor.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) + val jobExecutor = with(TaskExecutor.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val taskExecutor = TaskExecutor(registry, loggedProducer, client) + val cloudEventLogger = CloudEventLogger( + ServiceExecutorTopic, + config.serviceName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) taskExecutor.handle(message, publishedAt) } val handlerBatch: suspend (List, List) -> Unit = { messages, publishedAtList -> messages.zip(publishedAtList).forEach { (message, publishedAt) -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) } taskExecutor.handleBatch(messages) } - val beforeDlq: suspend (ServiceExecutorMessage?, Exception) -> Unit = { message, cause -> + val beforeDlq: suspend (ServiceExecutorMessage, Exception) -> Unit = { message, cause -> when (message) { - null -> Unit is ExecuteTask -> with(taskExecutor) { message.sendTaskFailed(cause, Task.meta, sendingMessageToDLQ) } } } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(ServiceExecutorTopic), entity = config.serviceName, concurrency = config.concurrency, process = handler, beforeDlq = beforeDlq, - batchConfig = taskExecutor::getBatchConfig, + batchConfig = { msg -> taskExecutor.getBatchConfig(msg) }, batchProcess = handlerBatch, ) } // TASK-EXECUTOR-RETRY - val jobRetry = with(scope) { - val logger = TaskRetryHandler.logger - val 
loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + val jobRetry = with(TaskRetryHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val taskRetryHandler = TaskRetryHandler(loggedProducer) - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(ServiceExecutorRetryTopic), entity = config.serviceName, concurrency = config.concurrency, @@ -472,19 +463,24 @@ class InfiniticWorker( } // TASK-EVENTS - val jobEvents = with(scope) { - val logger = TaskEventHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) + val jobEvents = with(TaskEventHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val taskEventHandler = TaskEventHandler(loggedProducer) + val cloudEventLogger = CloudEventLogger( + ServiceExecutorEventTopic, + config.serviceName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (ServiceExecutorEventMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) taskEventHandler.handle(message, publishedAt) } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(ServiceExecutorEventTopic), entity = config.serviceName, concurrency = config.concurrency, @@ -496,30 +492,31 @@ class InfiniticWorker( return listOf(jobExecutor, jobRetry, jobEvents) } + context(CoroutineScope) private suspend fun startWorkflowTagEngine(config: WorkflowTagEngineConfig): Job { // Log Workflow State Engine configuration logWorkflowTagEngineStart(config) - val logsEventLogger = KotlinLogging.logger( - "$LOGS_WORKFLOW_TAG_ENGINE.${config.workflowName}", - ).ignoreNull() - // WORKFLOW-TAG - val logger = WorkflowTagEngine.logger - val loggedStorage = LoggedWorkflowTagStorage(logger, config.workflowTagStorage) - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - - val workflowTagEngine = WorkflowTagEngine(loggedStorage, loggedProducer) + return with(WorkflowTagEngine.logger) { + val loggedStorage = LoggedWorkflowTagStorage(this, config.workflowTagStorage) + val loggedProducer = LoggedInfiniticProducer(this, producer) + val workflowTagEngine = WorkflowTagEngine(loggedStorage, loggedProducer) + + val cloudEventLogger = CloudEventLogger( + WorkflowTagEngineTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) - val handler: suspend (WorkflowTagEngineMessage, MillisInstant) -> Unit = - { message, publishedAt -> - logsEventLogger.logWorkflowCloudEvent(message, publishedAt, cloudEventSourcePrefix) - workflowTagEngine.handle(message, publishedAt) - } + val handler: suspend (WorkflowTagEngineMessage, MillisInstant) -> Unit = + { message, publishedAt -> + cloudEventLogger.log(message, publishedAt) + workflowTagEngine.handle(message, publishedAt) + } - return with(scope) { - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowTagEngineTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -529,29 +526,30 @@ class InfiniticWorker( } } + context(CoroutineScope) private suspend fun startWorkflowStateEngine(config: WorkflowStateEngineConfig): List { // Log Workflow State Engine configuration logWorkflowStateEngineStart(config) - val logsEventLogger = 
KotlinLogging.logger( - "$LOGS_WORKFLOW_STATE_ENGINE.${config.workflowName}", - ).ignoreNull() - - // WORKFLOW-CMD - val jobCmd = with(scope) { - val logger = WorkflowStateCmdHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + // WORKFLOW-STATE-CMD + val jobCmd = with(WorkflowStateCmdHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowStateCmdHandler = WorkflowStateCmdHandler(loggedProducer) + val cloudEventLogger = CloudEventLogger( + WorkflowStateCmdTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logWorkflowCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) workflowStateCmdHandler.handle(message, publishedAt) } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowStateCmdTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -561,23 +559,25 @@ class InfiniticWorker( } // WORKFLOW-STATE-ENGINE - val jobEngine = with(scope) { - val logger = WorkflowStateEngine.logger - val loggedStorage = LoggedWorkflowStateStorage(logger, config.workflowStateStorage) - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + val jobEngine = with(WorkflowStateEngine.logger) { + val loggedStorage = LoggedWorkflowStateStorage(this, config.workflowStateStorage) + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowStateEngine = WorkflowStateEngine(loggedStorage, loggedProducer) + val cloudEventLogger = CloudEventLogger( + WorkflowStateEngineTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (WorkflowStateEngineMessage, MillisInstant) -> Unit = { message, publishedAt -> - if (message !is WorkflowStateEngineCmdMessage) { - logsEventLogger.logWorkflowCloudEvent(message, publishedAt, cloudEventSourcePrefix) - } + cloudEventLogger.log(message, publishedAt) workflowStateEngine.handle(message, publishedAt) } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowStateEngineTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -586,16 +586,12 @@ class InfiniticWorker( ) } - // WORKFLOW TIMERS - val jobTimers = with(scope) { - val logger = WorkflowStateTimerHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + // WORKFLOW-STATE-TIMERS + val jobTimers = with(WorkflowStateTimerHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowStateTimerHandler = WorkflowStateTimerHandler(loggedProducer) - // we do not use loggedConsumer to avoid logging twice the messages coming from delayed topics - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowStateTimerTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -604,21 +600,25 @@ class InfiniticWorker( ) } - // WORKFLOW-EVENTS - val jobEvents = with(scope) { - val logger = WorkflowStateEventHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + // WORKFLOW-STATE-EVENTS + val jobEvents = 
with(WorkflowStateEventHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowStateEventHandler = WorkflowStateEventHandler(loggedProducer) - val handler: suspend (WorkflowStateEngineEventMessage, MillisInstant) -> Unit = + val cloudEventLogger = CloudEventLogger( + WorkflowStateEventTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) + + val handler: suspend (WorkflowStateEventMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logWorkflowCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) workflowStateEventHandler.handle(message, publishedAt) } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowStateEventTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -630,25 +630,26 @@ class InfiniticWorker( return listOf(jobCmd, jobEngine, jobTimers, jobEvents) } + context(CoroutineScope, KLogger) private suspend fun startWorkflowExecutor(config: WorkflowExecutorConfig): List { // Log Workflow Executor configuration logWorkflowExecutorStart(config) - val logsEventLogger = KotlinLogging.logger( - "$LOGS_WORKFLOW_EXECUTOR.${config.workflowName}", - ).ignoreNull() - // WORKFLOW-EXECUTOR - val jobExecutor = with(scope) { - val logger = TaskExecutor.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + val jobExecutor = with(TaskExecutor.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowTaskExecutor = TaskExecutor(registry, loggedProducer, client) + val cloudEventLogger = CloudEventLogger( + WorkflowExecutorTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (ServiceExecutorMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) workflowTaskExecutor.handle(message, publishedAt) } @@ -660,7 +661,7 @@ class InfiniticWorker( } } } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowExecutorTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -670,14 +671,11 @@ class InfiniticWorker( } // WORKFLOW-EXECUTOR-RETRY - val jobRetry = with(scope) { - val logger = TaskRetryHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) + val jobRetry = with(TaskRetryHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val taskRetryHandler = TaskRetryHandler(loggedProducer) - // we do not use loggedConsumer to avoid logging twice the messages coming from delayed topics - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowExecutorRetryTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -687,20 +685,24 @@ class InfiniticWorker( } // WORKFLOW-EXECUTOR-EVENT - val jobEvents = with(scope) { - val logger = TaskEventHandler.logger - val loggedConsumer = LoggedInfiniticConsumer(logger, consumer) - val loggedProducer = LoggedInfiniticProducer(logger, producer) - + val jobEvents = with(TaskEventHandler.logger) { + val loggedProducer = LoggedInfiniticProducer(this, producer) val workflowTaskEventHandler = TaskEventHandler(loggedProducer) + val cloudEventLogger = CloudEventLogger( + 
WorkflowExecutorEventTopic, + config.workflowName, + cloudEventSourcePrefix, + beautifyLogs, + ) + val handler: suspend (ServiceExecutorEventMessage, MillisInstant) -> Unit = { message, publishedAt -> - logsEventLogger.logServiceCloudEvent(message, publishedAt, cloudEventSourcePrefix) + cloudEventLogger.log(message, publishedAt) workflowTaskEventHandler.handle(message, publishedAt) } - loggedConsumer.startAsync( + consumer.startAsync( subscription = MainSubscription(WorkflowExecutorEventTopic), entity = config.workflowName, concurrency = config.concurrency, @@ -712,276 +714,7 @@ class InfiniticWorker( return listOf(jobExecutor, jobRetry, jobEvents) } - private fun CoroutineScope.startEventListener(config: EventListenerConfig): List { - logEventListenerStart(config) - - val jobServices = checkNewServices(config) { serviceName -> - logger.info { "EventListener starting listening Service $serviceName" } - - startServiceEventListener( - ServiceName(serviceName), - config.concurrency, - config.subscriptionName, - SubscriptionType.EVENT_LISTENER, - ) { message: Message, publishedAt: MillisInstant -> - message.toServiceCloudEvent(publishedAt, cloudEventSourcePrefix)?.let { cloudEvent -> - config.listener.onEvent(cloudEvent) - } - }.joinAll() - } - - val jobWorkflows = checkNewWorkflows(config) { workflowName -> - logger.info { "EventListener starting listening Workflow $workflowName" } - val jobs = mutableListOf() - jobs.addAll( - startWorkflowExecutorEventListener( - WorkflowName(workflowName), - config.concurrency, - config.subscriptionName, - SubscriptionType.EVENT_LISTENER, - ) { message, publishedAt -> - message.toServiceCloudEvent(publishedAt, cloudEventSourcePrefix)?.let { cloudEvent -> - config.listener.onEvent(cloudEvent) - } - }, - ) - jobs.addAll( - startWorkflowStateEventListener( - WorkflowName(workflowName), - config.concurrency, - config.subscriptionName, - SubscriptionType.EVENT_LISTENER, - ) { message, publishedAt -> - message.toWorkflowCloudEvent(publishedAt, cloudEventSourcePrefix)?.let { cloudEvent -> - config.listener.onEvent(cloudEvent) - } - }, - ) - jobs.joinAll() - } - - return listOf(jobServices, jobWorkflows) - } - - private fun CoroutineScope.checkNewServices( - config: EventListenerConfig, - starter: suspend (String) -> Unit - ) = launch { - val processedServices = mutableSetOf() - val loggedResources = LoggedInfiniticResources(EventListener.logger, resources) - - while (true) { - // Retrieve the list of services - loggedResources.getServices().onSuccess { services -> - val currentServices = services.filter { config.includeService(it) } - - // Determine new services that haven't been processed - val newServices = currentServices.filterNot { it in processedServices } - - // Launch starter for each new service - for (service in newServices) { - starter(service) - // Add the service to the set of processed services - processedServices.add(service) - } - } - - delay((config.refreshDelaySeconds * 1000).toLong()) - } - } - - private fun CoroutineScope.checkNewWorkflows( - config: EventListenerConfig, - starter: suspend (String) -> Unit - ) = launch { - val processedWorkflows = mutableSetOf() - val loggedResources = LoggedInfiniticResources(EventListener.logger, resources) - - while (true) { - // Retrieve the list of workflows - loggedResources.getWorkflows().onSuccess { workflows -> - val currentWorkflows = workflows.filter { config.includeWorkflow(it) } - - // Determine new workflows that haven't been processed - val newWorkflows = currentWorkflows.filterNot { it in 
processedWorkflows } - - // Launch starter for each new workflow - for (workflow in newWorkflows) { - starter(workflow) - // Add the workflow to the set of processed workflows - processedWorkflows.add(workflow) - } - } - - delay((config.refreshDelaySeconds * 1000).toLong()) - } - } - private val logMessageSentToDLQ = { message: Message?, e: Exception -> logger.error(e) { "Sending message to DLQ ${message ?: "(Not Deserialized)"}" } } - - private suspend fun startServiceEventListener( - serviceName: ServiceName, - concurrency: Int, - subscriptionName: String?, - subscriptionType: SubscriptionType, - handler: (Message, MillisInstant) -> Unit - ): List { - val loggedConsumer = LoggedInfiniticConsumer(EventListener.logger, consumer) - - // TASK-EXECUTOR topic - val jobExecutor = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(ServiceExecutorTopic, subscriptionName), - entity = serviceName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - // TASK-RETRY topic - val jobRetry = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(ServiceExecutorRetryTopic, subscriptionName), - entity = serviceName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - // TASK-EVENTS topic - val jobEvents = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(ServiceExecutorEventTopic, subscriptionName), - entity = serviceName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - - return listOf(jobExecutor, jobRetry, jobEvents) - } - - private suspend fun startWorkflowExecutorEventListener( - workflowName: WorkflowName, - concurrency: Int, - subscriptionName: String?, - subscriptionType: SubscriptionType, - handler: (Message, MillisInstant) -> Unit - ): List { - val loggedConsumer = LoggedInfiniticConsumer(EventListener.logger, consumer) - - // WORKFLOW-TASK-EXECUTOR topic - val jobExecutor = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(WorkflowExecutorTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - // WORKFLOW-TASK-RETRY topic - val jobRetry = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(WorkflowExecutorRetryTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - // WORKFLOW-TASK-EVENTS topic - val jobEvents = with(scope) { - loggedConsumer.startAsync( - subscription = subscriptionType.create(WorkflowExecutorEventTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - return listOf(jobExecutor, jobRetry, jobEvents) - } - - private suspend fun startWorkflowStateEventListener( - workflowName: WorkflowName, - concurrency: Int, - subscriptionName: String?, - subscriptionType: SubscriptionType, - handler: (Message, MillisInstant) -> Unit - ): List { - // WORKFLOW-CMD topic - val jobWorkflowCmd = with(scope) { - consumer.startAsync( - subscription = subscriptionType.create(WorkflowStateCmdTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - - // WORKFLOW-STATE-ENGINE topic - val 
jobWorkflowEngine = with(scope) { - consumer.startAsync( - subscription = subscriptionType.create(WorkflowStateEngineTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = { message: Message, publishedAt: MillisInstant -> - // the event handler is not applied for WorkflowCmdMessage from clients - // as the event has already been handled in the workflow-cmd topic - if (message !is WorkflowStateEngineCmdMessage) handler(message, publishedAt) - }, - beforeDlq = logMessageSentToDLQ, - ) - } - - // WORKFLOW-EVENTS topic - val jobWorkflowEvents = with(scope) { - consumer.startAsync( - subscription = subscriptionType.create(WorkflowStateEventTopic, subscriptionName), - entity = workflowName.toString(), - concurrency = concurrency, - process = handler, - beforeDlq = logMessageSentToDLQ, - ) - } - - return listOf(jobWorkflowCmd, jobWorkflowEngine, jobWorkflowEvents) - } - - private fun KLogger.logCloudEvent( - message: Message, - publishedAt: MillisInstant, - prefix: String, - eventProducer: Message.(MillisInstant, String) -> CloudEvent? - ) { - try { - debug { - message.eventProducer(publishedAt, prefix)?.toJsonString(beautifyLogs) - } - } catch (e: Exception) { - // Failure to log shouldn't break the application - try { - error(e) { "Error while logging the CloudEvent json of: $message" } - } catch (error: Exception) { - System.err.println("Failed to log the original exception due to ${error.message}\n${error.stackTraceToString()}") - } - } - } - - private fun KLogger.logWorkflowCloudEvent( - message: Message, - publishedAt: MillisInstant, - prefix: String, - ) = logCloudEvent(message, publishedAt, prefix, Message::toWorkflowCloudEvent) - - private fun KLogger.logServiceCloudEvent( - message: Message, - publishedAt: MillisInstant, - prefix: String, - ) = logCloudEvent(message, publishedAt, prefix, Message::toServiceCloudEvent) } diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorkerBuilder.kt b/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorkerBuilder.kt index fbbfff96e..34f253f3b 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorkerBuilder.kt +++ b/infinitic-worker/src/main/kotlin/io/infinitic/workers/InfiniticWorkerBuilder.kt @@ -24,9 +24,9 @@ package io.infinitic.workers +import io.infinitic.events.config.EventListenerConfig import io.infinitic.storage.config.StorageConfig import io.infinitic.transport.config.TransportConfig -import io.infinitic.workers.config.EventListenerConfig import io.infinitic.workers.config.InfiniticWorkerConfig import io.infinitic.workers.config.LogsConfig import io.infinitic.workers.config.ServiceConfig diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/ConfigGetterInterface.kt b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/ConfigGetterInterface.kt index 0a78392ae..8b999c91e 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/ConfigGetterInterface.kt +++ b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/ConfigGetterInterface.kt @@ -22,6 +22,8 @@ */ package io.infinitic.workers.config +import io.infinitic.events.config.EventListenerConfig + interface ConfigGetterInterface { fun getEventListenerConfig(): EventListenerConfig? 
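The removals just above take out the per-handler `KLogger.logCloudEvent` / `logServiceCloudEvent` / `logWorkflowCloudEvent` extensions; throughout the worker hunks earlier in this diff they are replaced by a `CloudEventLogger` built once per topic and invoked before each handler. A minimal sketch of that wrapping pattern, using stand-in types for the Infinitic internals (only the `CloudEventLogger` constructor order and `log(message, publishedAt)` call are taken from the hunks above; every other name here is illustrative):

import kotlinx.coroutines.runBlocking

// Stand-ins for Infinitic internals referenced in the diff (illustrative only).
data class MillisInstant(val long: Long)
interface Message
data class Topic(val name: String)

// Assumed shape of the new CloudEventLogger: topic, entity name, source prefix and the
// beautifyLogs flag are captured once, instead of each handler building its own
// per-topic logger and try/catching the CloudEvent serialization itself.
class CloudEventLogger(
    private val topic: Topic,
    private val entity: String,
    private val sourcePrefix: String,
    private val beautify: Boolean,
) {
  fun log(message: Message, publishedAt: MillisInstant) =
      // the real implementation logs a CloudEvent JSON view at debug level and swallows
      // serialization failures; println keeps this sketch dependency-free
      println("[$sourcePrefix/${topic.name}/$entity] at=${publishedAt.long} beautify=$beautify $message")
}

// The recurring pattern in the diff: log the CloudEvent view, then delegate to the engine.
fun <M : Message> withCloudEventLogging(
    cloudEventLogger: CloudEventLogger,
    handle: suspend (M, MillisInstant) -> Unit,
): suspend (M, MillisInstant) -> Unit = { message, publishedAt ->
  cloudEventLogger.log(message, publishedAt)
  handle(message, publishedAt)
}

fun main() = runBlocking {
  val logger = CloudEventLogger(Topic("ServiceExecutorTopic"), "MyService", "inmemory", false)
  val handler = withCloudEventLogging<Message>(logger) { m, _ -> println("handled $m") }
  handler(object : Message {}, MillisInstant(0))
}

Factoring the logging out this way keeps the cloud-event concern out of TaskTagEngine, TaskExecutor and the workflow engines themselves, which is the direction the hunks above take.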
diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfig.kt b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfig.kt index 5ccc826ec..85acd0114 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfig.kt +++ b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfig.kt @@ -26,6 +26,7 @@ package io.infinitic.workers.config import io.infinitic.config.loadFromYamlFile import io.infinitic.config.loadFromYamlResource import io.infinitic.config.loadFromYamlString +import io.infinitic.events.config.EventListenerConfig import io.infinitic.storage.config.StorageConfig import io.infinitic.transport.config.TransportConfig diff --git a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfigInterface.kt b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfigInterface.kt index 7b677681f..18251b18e 100644 --- a/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfigInterface.kt +++ b/infinitic-worker/src/main/kotlin/io/infinitic/workers/config/InfiniticWorkerConfigInterface.kt @@ -24,10 +24,11 @@ package io.infinitic.workers.config import io.infinitic.clients.config.InfiniticClientConfigInterface +import io.infinitic.events.config.EventListenerConfig import io.infinitic.storage.config.StorageConfig @Suppress("unused") -interface InfiniticWorkerConfigInterface: InfiniticClientConfigInterface { +interface InfiniticWorkerConfigInterface : InfiniticClientConfigInterface { /** Default storage */ val storage: StorageConfig? diff --git a/infinitic-worker/src/test/java/io/infinitic/workers/JavaInfiniticWorkerTests.java b/infinitic-worker/src/test/java/io/infinitic/workers/JavaInfiniticWorkerTests.java index 68ca12c63..5ba287c30 100644 --- a/infinitic-worker/src/test/java/io/infinitic/workers/JavaInfiniticWorkerTests.java +++ b/infinitic-worker/src/test/java/io/infinitic/workers/JavaInfiniticWorkerTests.java @@ -23,6 +23,7 @@ package io.infinitic.workers; import io.infinitic.common.workers.config.WithExponentialBackoffRetry; +import io.infinitic.events.config.EventListenerConfig; import io.infinitic.storage.config.PostgresStorageConfig; import io.infinitic.storage.config.StorageConfig; import io.infinitic.transport.config.PulsarTransportConfig; diff --git a/infinitic-worker/src/test/kotlin/io/infinitic/workers/InfiniticWorkerTests.kt b/infinitic-worker/src/test/kotlin/io/infinitic/workers/InfiniticWorkerTests.kt index daeb9ed0a..d66a5a268 100644 --- a/infinitic-worker/src/test/kotlin/io/infinitic/workers/InfiniticWorkerTests.kt +++ b/infinitic-worker/src/test/kotlin/io/infinitic/workers/InfiniticWorkerTests.kt @@ -25,11 +25,11 @@ package io.infinitic.workers import io.cloudevents.CloudEvent import io.infinitic.cloudEvents.CloudEventListener import io.infinitic.common.fixtures.later +import io.infinitic.events.config.EventListenerConfig import io.infinitic.storage.config.InMemoryConfig import io.infinitic.storage.config.InMemoryStorageConfig import io.infinitic.storage.config.MySQLConfig import io.infinitic.transport.config.InMemoryTransportConfig -import io.infinitic.workers.config.EventListenerConfig import io.infinitic.workers.config.InfiniticWorkerConfig import io.infinitic.workers.config.ServiceExecutorConfig import io.infinitic.workers.config.ServiceTagEngineConfig @@ -51,7 +51,7 @@ internal class InfiniticWorkerTests : StringSpec( val transport = InMemoryTransportConfig() class 
TestEventListener : CloudEventListener { - override fun onEvent(event: CloudEvent) {} + override fun onEvents(cloudEvents: List<CloudEvent>) {} } val eventListener = EventListenerConfig.builder() @@ -97,6 +97,7 @@ internal class InfiniticWorkerTests : StringSpec( .setTransport(transport) .setEventListener(eventListener) .build() + } worker.getEventListenerConfig() shouldBe eventListener } @@ -152,6 +153,7 @@ services: .build() } worker.getServiceTagEngineConfig(serviceName) shouldBe serviceTagEngine + worker.close() } "Can create Infinitic Worker as Service Tag Engine through Yaml" { @@ -422,6 +424,7 @@ workflows: } worker.startAsync() flag shouldBe false + worker.close() } "start() should block, and be released when closed" { @@ -436,7 +439,7 @@ workflows: .build() var flag = false - later(2000) { + later(1000) { flag = true worker.close() } diff --git a/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/ConfigGetterInterfaceTests.kt b/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/ConfigGetterInterfaceTests.kt index cb79494f7..3fed0efda 100644 --- a/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/ConfigGetterInterfaceTests.kt +++ b/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/ConfigGetterInterfaceTests.kt @@ -24,6 +24,7 @@ package io.infinitic.workers.config import io.cloudevents.CloudEvent import io.infinitic.cloudEvents.CloudEventListener +import io.infinitic.events.config.EventListenerConfig import io.infinitic.storage.config.InMemoryConfig import io.infinitic.storage.config.InMemoryStorageConfig import io.infinitic.transport.config.InMemoryTransportConfig @@ -38,7 +39,7 @@ import io.kotest.matchers.shouldBe internal class ConfigGetterInterfaceTests : StringSpec( { class TestEventListener : CloudEventListener { - override fun onEvent(event: CloudEvent) {} + override fun onEvents(cloudEvents: List<CloudEvent>) {} } val transport = InMemoryTransportConfig() diff --git a/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/EventListenerConfigTests.kt b/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/EventListenerConfigTests.kt index 75dab9d58..0e27c1cd0 100644 --- a/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/EventListenerConfigTests.kt +++ b/infinitic-worker/src/test/kotlin/io/infinitic/workers/config/EventListenerConfigTests.kt @@ -24,7 +24,9 @@ package io.infinitic.workers.config import io.cloudevents.CloudEvent import io.infinitic.cloudEvents.CloudEventListener +import io.infinitic.cloudEvents.EntityListConfig import io.infinitic.common.utils.annotatedName +import io.infinitic.events.config.EventListenerConfig import io.infinitic.workers.samples.ServiceA import io.infinitic.workers.samples.ServiceAImpl import io.infinitic.workers.samples.WorkflowA @@ -36,7 +38,9 @@ import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf internal class TestEventListener : CloudEventListener { - override fun onEvent(event: CloudEvent) {} + override fun onEvents(cloudEvents: List<CloudEvent>) { + // do nothing + } } internal class EventListenerConfigTests : StringSpec( { @@ -54,11 +58,8 @@ internal class EventListenerConfigTests : StringSpec( config.listener shouldBe listener config.concurrency shouldBe 1 config.subscriptionName shouldBe null - config.refreshDelaySeconds shouldBe 60.0 - config.allowedServices shouldBe null - config.allowedWorkflows shouldBe null - config.disallowedServices.size shouldBe 0 - config.disallowedWorkflows.size shouldBe 0 + config.serviceListConfig shouldBe EntityListConfig(60.0, 
null, listOf()) + config.workflowListConfig shouldBe EntityListConfig(60.0, null, listOf()) } "Can create EventListenerConfig through Yaml with default parameters" { @@ -74,11 +75,8 @@ class: ${TestEventListener::class.java.name} config.listener::class shouldBe TestEventListener::class config.concurrency shouldBe 1 config.subscriptionName shouldBe null - config.refreshDelaySeconds shouldBe 60.0 - config.allowedServices shouldBe null - config.allowedWorkflows shouldBe null - config.disallowedServices.size shouldBe 0 - config.disallowedWorkflows.size shouldBe 0 + config.serviceListConfig shouldBe EntityListConfig(60.0, null, listOf()) + config.workflowListConfig shouldBe EntityListConfig(60.0, null, listOf()) } "Can create EventListenerConfig through builder with all parameters" { @@ -87,7 +85,8 @@ class: ${TestEventListener::class.java.name} .setListener(listener) .setConcurrency(10) .setSubscriptionName("subscriptionName") - .setRefreshDelaySeconds(10.0) + .setServiceListRefreshSeconds(10.0) + .setWorkflowListRefreshSeconds(20.0) .allowServices("service1", "service2") .allowServices("service3") .allowServices(ServiceA::class.java) @@ -107,30 +106,15 @@ class: ${TestEventListener::class.java.name} config.listener shouldBe listener config.concurrency shouldBe 10 config.subscriptionName shouldBe "subscriptionName" - config.refreshDelaySeconds shouldBe 10.0 - config.allowedServices shouldBe listOf( - "service1", - "service2", - "service3", - ServiceA::class.java.annotatedName, + config.serviceListConfig shouldBe EntityListConfig( + 10.0, + listOf("service1", "service2", "service3", ServiceA::class.java.annotatedName), + listOf("service4", "service5", "service6", ServiceA::class.java.annotatedName), ) - config.allowedWorkflows shouldBe listOf( - "workflow1", - "workflow2", - "workflow3", - WorkflowA::class.java.annotatedName, - ) - config.disallowedServices shouldBe listOf( - "service4", - "service5", - "service6", - ServiceA::class.java.annotatedName, - ) - config.disallowedWorkflows shouldBe listOf( - "workflow4", - "workflow5", - "workflow6", - WorkflowA::class.java.annotatedName, + config.workflowListConfig shouldBe EntityListConfig( + 20.0, + listOf("workflow1", "workflow2", "workflow3", WorkflowA::class.java.annotatedName), + listOf("workflow4", "workflow5", "workflow6", WorkflowA::class.java.annotatedName), ) } @@ -143,6 +127,7 @@ concurrency: 10 subscriptionName: subscriptionName refreshDelaySeconds: 10 services: + listRefreshSeconds: 10 allow: - service1 - service2 @@ -152,6 +137,7 @@ services: - service5 - service6 workflows: + listRefreshSeconds: 20 allow: - workflow1 - workflow2 @@ -167,11 +153,16 @@ workflows: config.listener::class shouldBe TestEventListener::class config.concurrency shouldBe 10 config.subscriptionName shouldBe "subscriptionName" - config.refreshDelaySeconds shouldBe 10.0 - config.allowedServices shouldBe listOf("service1", "service2", "service3") - config.allowedWorkflows shouldBe listOf("workflow1", "workflow2", "workflow3") - config.disallowedServices shouldBe listOf("service4", "service5", "service6") - config.disallowedWorkflows shouldBe listOf("workflow4", "workflow5", "workflow6") + config.serviceListConfig shouldBe EntityListConfig( + 10.0, + listOf("service1", "service2", "service3"), + listOf("service4", "service5", "service6"), + ) + config.workflowListConfig shouldBe EntityListConfig( + 20.0, + listOf("workflow1", "workflow2", "workflow3"), + listOf("workflow4", "workflow5", "workflow6"), + ) } "Listener not implementing CloudEventListener should throw 
exception" { diff --git a/infinitic-worker/src/test/kotlin/io/infinitic/workers/samples/EventListenerImpl.kt b/infinitic-worker/src/test/kotlin/io/infinitic/workers/samples/EventListenerImpl.kt index 1329b8bb1..da740afdb 100644 --- a/infinitic-worker/src/test/kotlin/io/infinitic/workers/samples/EventListenerImpl.kt +++ b/infinitic-worker/src/test/kotlin/io/infinitic/workers/samples/EventListenerImpl.kt @@ -27,13 +27,13 @@ import io.infinitic.cloudEvents.CloudEventListener internal class EventListenerImpl : CloudEventListener { - override fun onEvent(event: CloudEvent) { + override fun onEvents(cloudEvents: List<CloudEvent>) { // do nothing } } internal class EventListenerFake : CloudEventListener { - override fun onEvent(event: CloudEvent) { + override fun onEvents(cloudEvents: List<CloudEvent>) { // do nothing } } diff --git a/infinitic-worker/src/test/resources/simplelogger.properties b/infinitic-worker/src/test/resources/simplelogger.properties index 8858a8cc1..fe3fd44f4 100644 --- a/infinitic-worker/src/test/resources/simplelogger.properties +++ b/infinitic-worker/src/test/resources/simplelogger.properties @@ -5,7 +5,7 @@ # Default logging detail level for all instances of SimpleLogger. # Must be one of ("trace", "debug", "info", "warn", or "error"). # If not specified, defaults to "info". -org.slf4j.simpleLogger.defaultLogLevel=warn +org.slf4j.simpleLogger.defaultLogLevel=info # Logging detail level for a SimpleLogger instance named "xxxxx". # Must be one of ("trace", "debug", "info", "warn", or "error"). # If not specified, the default logging detail level is used. diff --git a/infinitic-workflow-engine/src/main/kotlin/io/infinitic/workflows/engine/WorkflowStateEventHandler.kt b/infinitic-workflow-engine/src/main/kotlin/io/infinitic/workflows/engine/WorkflowStateEventHandler.kt index cdff8f459..69a426e78 100644 --- a/infinitic-workflow-engine/src/main/kotlin/io/infinitic/workflows/engine/WorkflowStateEventHandler.kt +++ b/infinitic-workflow-engine/src/main/kotlin/io/infinitic/workflows/engine/WorkflowStateEventHandler.kt @@ -41,7 +41,7 @@ import io.infinitic.common.workflows.engine.messages.TaskDispatchedEvent import io.infinitic.common.workflows.engine.messages.TimerDispatchedEvent import io.infinitic.common.workflows.engine.messages.WorkflowCanceledEvent import io.infinitic.common.workflows.engine.messages.WorkflowCompletedEvent -import io.infinitic.common.workflows.engine.messages.WorkflowStateEngineEventMessage +import io.infinitic.common.workflows.engine.messages.WorkflowStateEventMessage import kotlinx.coroutines.coroutineScope import kotlinx.coroutines.launch @@ -49,7 +49,7 @@ class WorkflowStateEventHandler(val producer: InfiniticProducer) { private suspend fun getEmitterName() = EmitterName(producer.getName()) - suspend fun handle(msg: WorkflowStateEngineEventMessage, publishTime: MillisInstant) { + suspend fun handle(msg: WorkflowStateEventMessage, publishTime: MillisInstant) { when (msg) { is WorkflowCanceledEvent -> Unit is WorkflowCompletedEvent -> Unit
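The listener updates in the test hunks above all follow the same migration: the per-event callback `onEvent(event: CloudEvent)` becomes the batched `onEvents(cloudEvents: List<CloudEvent>)`. A minimal sketch of what an implementation looks like after the change; the interface declaration below is a stand-in inferred from this diff (the real io.infinitic.cloudEvents.CloudEventListener may declare more), and the forwarding sink is purely illustrative:

import io.cloudevents.CloudEvent // CloudEvents Java SDK, already a test dependency in these hunks

// Stand-in for the batched listener contract this diff migrates to.
interface CloudEventListener {
  fun onEvents(cloudEvents: List<CloudEvent>)
}

// A listener that forwards whole batches to a sink instead of one event at a time,
// which is the point of receiving a List rather than a single event.
class ForwardingEventListener(
    private val sink: (List<String>) -> Unit,
) : CloudEventListener {
  override fun onEvents(cloudEvents: List<CloudEvent>) {
    // group the batch into a single call to the sink
    sink(cloudEvents.map { "${it.type}:${it.id}" })
  }
}

fun main() {
  // illustrative usage with an empty batch
  ForwardingEventListener { lines -> lines.forEach(::println) }.onEvents(emptyList())
}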