From 0e3b3e3308c43ad1d7df8957a5bbaefb27e6ac81 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 16 Jul 2021 20:12:47 +0100 Subject: [PATCH 001/162] Drop Scala 2.12 --- build.sbt | 28 +++++------------- .../fs2/kafka/internal/converters.scala | 14 --------- .../fs2/kafka/internal/converters.scala | 16 ---------- .../src/main/scala/fs2/kafka/Headers.scala | 2 +- .../scala/fs2/kafka/KafkaAdminClient.scala | 2 +- .../main/scala/fs2/kafka/KafkaConsumer.scala | 17 ++++++----- .../main/scala/fs2/kafka/KafkaProducer.scala | 12 ++++---- .../kafka/TransactionalKafkaProducer.scala | 8 ++--- .../scala/fs2/kafka/admin/MkAdminClient.scala | 2 +- .../scala/fs2/kafka/consumer/MkConsumer.scala | 2 +- .../kafka/internal/KafkaConsumerActor.scala | 5 ++-- .../fs2/kafka/internal/WithAdminClient.scala | 3 +- .../fs2/kafka/internal/WithConsumer.scala | 4 +-- .../fs2/kafka/internal/WithProducer.scala | 6 ++-- .../scala/fs2/kafka/internal/syntax.scala | 29 +++---------------- .../scala/fs2/kafka/producer/MkProducer.scala | 2 +- .../test/scala/fs2/kafka/BaseKafkaSpec.scala | 2 +- .../test/scala/fs2/kafka/HeadersSpec.scala | 2 +- .../scala/fs2/kafka/KafkaConsumerSpec.scala | 2 +- .../TransactionalKafkaProducerSpec.scala | 2 +- .../scala/fs2/kafka/internal/SyntaxSpec.scala | 11 ------- .../scala/fs2/kafka/vulcan/AvroSettings.scala | 2 +- .../vulcan/SchemaRegistryClientSettings.scala | 2 +- 23 files changed, 51 insertions(+), 124 deletions(-) delete mode 100644 modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala delete mode 100644 modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala diff --git a/build.sbt b/build.sbt index 3a7095c62..006895565 100644 --- a/build.sbt +++ b/build.sbt @@ -12,9 +12,7 @@ val testcontainersScalaVersion = "0.39.5" val vulcanVersion = "1.7.1" -val scala212 = "2.12.14" - -val scala213 = "2.13.6" +val scala2 = "2.13.6" val scala3 = "3.0.1" @@ -181,7 +179,7 @@ ThisBuild / githubWorkflowTargetBranches := Seq("series/*") ThisBuild / githubWorkflowBuild := Seq( WorkflowStep.Sbt(List("ci")), - WorkflowStep.Sbt(List("docs/run"), cond = Some(s"matrix.scala == '$scala213'")) + WorkflowStep.Sbt(List("docs/run"), cond = Some(s"matrix.scala == '$scala2'")) ) ThisBuild / githubWorkflowArtifactUpload := false @@ -251,8 +249,8 @@ lazy val noPublishSettings = publishArtifact := false ) -ThisBuild / scalaVersion := scala213 -ThisBuild / crossScalaVersions := Seq(scala212, scala213, scala3) +ThisBuild / scalaVersion := scala2 +ThisBuild / crossScalaVersions := Seq(scala2, scala3) lazy val scalaSettings = Seq( scalacOptions ++= Seq( @@ -263,26 +261,14 @@ lazy val scalaSettings = Seq( "-language:implicitConversions", "-unchecked" ) ++ ( - if (scalaVersion.value.startsWith("2.13")) - Seq( - "-language:higherKinds", - "-Xlint", - "-Ywarn-dead-code", - "-Ywarn-numeric-widen", - "-Ywarn-value-discard", - "-Ywarn-unused", - "-Xfatal-warnings" - ) - else if (scalaVersion.value.startsWith("2.12")) + if (scalaVersion.value.startsWith("2")) Seq( "-language:higherKinds", "-Xlint", - "-Yno-adapted-args", "-Ywarn-dead-code", "-Ywarn-numeric-widen", "-Ywarn-value-discard", "-Ywarn-unused", - "-Ypartial-unification", "-Xfatal-warnings" ) else @@ -367,7 +353,7 @@ addCommandsAlias( List( "+clean", "+test", - "+mimaReportBinaryIssues", + //"+mimaReportBinaryIssues", "+scalafmtCheck", "scalafmtSbtCheck", "+headerCheck", @@ -381,7 +367,7 @@ addCommandsAlias( List( "clean", "test", - "mimaReportBinaryIssues", + // "mimaReportBinaryIssues", "scalafmtCheck", "scalafmtSbtCheck", "headerCheck", diff 
--git a/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala b/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala deleted file mode 100644 index 0a6d947b3..000000000 --- a/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright 2018-2021 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka.internal - -private[kafka] object converters { - val collection = scala.collection.JavaConverters - - def unsafeWrapArray[A](array: Array[A]): Seq[A] = - array -} diff --git a/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala b/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala deleted file mode 100644 index 32e41a71a..000000000 --- a/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright 2018-2021 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka.internal - -import scala.collection.immutable.ArraySeq - -private[kafka] object converters { - val collection = scala.jdk.CollectionConverters - - def unsafeWrapArray[A](array: Array[A]): Seq[A] = - ArraySeq.unsafeWrapArray(array) -} diff --git a/modules/core/src/main/scala/fs2/kafka/Headers.scala b/modules/core/src/main/scala/fs2/kafka/Headers.scala index 360ee37d0..952a7aebe 100644 --- a/modules/core/src/main/scala/fs2/kafka/Headers.scala +++ b/modules/core/src/main/scala/fs2/kafka/Headers.scala @@ -8,7 +8,7 @@ package fs2.kafka import cats.data.{Chain, NonEmptyChain} import cats.{Eq, Show} -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import fs2.kafka.internal.syntax._ /** diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index 927b2aaa6..743e4c00e 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -12,7 +12,7 @@ import fs2.Stream import fs2.kafka.KafkaAdminClient._ import fs2.kafka.admin.MkAdminClient import fs2.kafka.internal.WithAdminClient -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import fs2.kafka.internal.syntax._ import org.apache.kafka.clients.admin._ import org.apache.kafka.clients.consumer.OffsetAndMetadata diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index c88217438..712e82c12 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -14,7 +14,8 @@ import cats.effect.implicits._ import cats.syntax.all._ import fs2.{Chunk, Stream} import fs2.kafka.internal._ -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ +import scala.jdk.DurationConverters._ import fs2.kafka.instances._ import fs2.kafka.internal.KafkaConsumerActor._ import fs2.kafka.internal.syntax._ @@ -438,13 +439,13 @@ object KafkaConsumer { topic: String, timeout: FiniteDuration ): F[List[PartitionInfo]] = - withConsumer.blocking { _.partitionsFor(topic, timeout.asJava).asScala.toList } + withConsumer.blocking { _.partitionsFor(topic, timeout.toJava).asScala.toList } override def position(partition: TopicPartition): F[Long] = withConsumer.blocking { _.position(partition) } override def position(partition: TopicPartition, timeout: FiniteDuration): 
F[Long] = - withConsumer.blocking { _.position(partition, timeout.asJava) } + withConsumer.blocking { _.position(partition, timeout.toJava) } override def subscribeTo(firstTopic: String, remainingTopics: String*): F[Unit] = subscribe(NonEmptyList.of(firstTopic, remainingTopics: _*)) @@ -511,7 +512,7 @@ object KafkaConsumer { timeout: FiniteDuration ): F[Map[TopicPartition, Long]] = withConsumer.blocking { - _.beginningOffsets(partitions.asJava, timeout.asJava) + _.beginningOffsets(partitions.asJava, timeout.toJava) .asInstanceOf[util.Map[TopicPartition, Long]] .toMap } @@ -530,7 +531,7 @@ object KafkaConsumer { timeout: FiniteDuration ): F[Map[TopicPartition, Long]] = withConsumer.blocking { - _.endOffsets(partitions.asJava, timeout.asJava) + _.endOffsets(partitions.asJava, timeout.toJava) .asInstanceOf[util.Map[TopicPartition, Long]] .toMap } @@ -617,7 +618,7 @@ object KafkaConsumer { def stream[F[_], K, V]( settings: ConsumerSettings[F, K, V] )(implicit F: Async[F], mk: MkConsumer[F]): Stream[F, KafkaConsumer[F, K, V]] = - Stream.resource(resource(settings)(F, mk)) + Stream.resource(resource(settings)) def apply[F[_]]: ConsumerPartiallyApplied[F] = new ConsumerPartiallyApplied() @@ -639,7 +640,7 @@ object KafkaConsumer { implicit F: Async[F], mk: MkConsumer[F] ): Resource[F, KafkaConsumer[F, K, V]] = - KafkaConsumer.resource(settings)(F, mk) + KafkaConsumer.resource(settings) /** * Alternative version of `stream` where the `F[_]` is @@ -655,7 +656,7 @@ object KafkaConsumer { implicit F: Async[F], mk: MkConsumer[F] ): Stream[F, KafkaConsumer[F, K, V]] = - KafkaConsumer.stream(settings)(F, mk) + KafkaConsumer.stream(settings) override def toString: String = "ConsumerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 179c5a9a9..37481f833 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -11,7 +11,7 @@ import cats.effect._ import cats.implicits._ import fs2._ import fs2.kafka.internal._ -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.producer.RecordMetadata import org.apache.kafka.common.{Metric, MetricName} import fs2.kafka.producer.MkProducer @@ -135,7 +135,7 @@ object KafkaProducer { def resource[F[_], K, V]( settings: ProducerSettings[F, K, V] )(implicit F: Async[F], mk: MkProducer[F]): Resource[F, KafkaProducer.Metrics[F, K, V]] = - KafkaProducerConnection.resource(settings)(F, mk).evalMap(_.withSerializersFrom(settings)) + KafkaProducerConnection.resource(settings).evalMap(_.withSerializersFrom(settings)) private[kafka] def from[F[_]: Async, K, V]( withProducer: WithProducer[F], @@ -173,7 +173,7 @@ object KafkaProducer { def stream[F[_], K, V]( settings: ProducerSettings[F, K, V] )(implicit F: Async[F], mk: MkProducer[F]): Stream[F, KafkaProducer.Metrics[F, K, V]] = - Stream.resource(KafkaProducer.resource(settings)(F, mk)) + Stream.resource(KafkaProducer.resource(settings)) private[kafka] def produceRecord[F[_], K, V]( keySerializer: Serializer[F, K], @@ -209,7 +209,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Pipe[F, ProducerRecords[P, K, V], ProducerResult[P, K, V]] = - records => stream(settings)(F, mk).flatMap(pipe(settings, _).apply(records)) + records => stream(settings).flatMap(pipe(settings, _).apply(records)) /** * Produces records in batches using the 
provided [[KafkaProducer]]. @@ -273,7 +273,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Resource[F, KafkaProducer[F, K, V]] = - KafkaProducer.resource(settings)(F, mk) + KafkaProducer.resource(settings) /** * Alternative version of `stream` where the `F[_]` is @@ -289,7 +289,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, KafkaProducer[F, K, V]] = - KafkaProducer.stream(settings)(F, mk) + KafkaProducer.stream(settings) override def toString: String = "ProducerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 58d8daf32..85ece9be5 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -11,7 +11,7 @@ import cats.effect.syntax.all._ import cats.syntax.all._ import fs2.{Chunk, Stream} import fs2.kafka.internal._ -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.producer.RecordMetadata import org.apache.kafka.common.{Metric, MetricName} import fs2.kafka.producer.MkProducer @@ -151,7 +151,7 @@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, TransactionalKafkaProducer.Metrics[F, K, V]] = - Stream.resource(resource(settings)(F, mk)) + Stream.resource(resource(settings)) def apply[F[_]]: TransactionalProducerPartiallyApplied[F] = new TransactionalProducerPartiallyApplied @@ -173,7 +173,7 @@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Resource[F, TransactionalKafkaProducer.Metrics[F, K, V]] = - TransactionalKafkaProducer.resource(settings)(F, mk) + TransactionalKafkaProducer.resource(settings) /** * Alternative version of `stream` where the `F[_]` is @@ -189,7 +189,7 @@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, TransactionalKafkaProducer.Metrics[F, K, V]] = - TransactionalKafkaProducer.stream(settings)(F, mk) + TransactionalKafkaProducer.stream(settings) override def toString: String = "TransactionalProducerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala index aede8d8cf..22322161e 100644 --- a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala @@ -9,7 +9,7 @@ package fs2.kafka.admin import cats.effect.Sync import fs2.kafka.AdminClientSettings import org.apache.kafka.clients.admin.AdminClient -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ /** * A capability trait representing the ability to instantiate the Java diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala index 098ba16f1..190a6498b 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala @@ -8,7 +8,7 @@ package fs2.kafka.consumer import cats.effect.Sync import fs2.kafka.{ConsumerSettings, KafkaByteConsumer} -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.common.serialization.ByteArrayDeserializer /** diff --git 
a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index 075c0d7c9..141c7bd5d 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -13,7 +13,8 @@ import cats.effect.syntax.all._ import cats.syntax.all._ import fs2.Chunk import fs2.kafka._ -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ +import scala.jdk.DurationConverters._ import fs2.kafka.instances._ import fs2.kafka.internal.KafkaConsumerActor._ import fs2.kafka.internal.LogEntry._ @@ -410,7 +411,7 @@ private[kafka] final class KafkaConsumerActor[F[_], K, V]( .map(_.toMap) private[this] val pollTimeout: Duration = - settings.pollTimeout.asJava + settings.pollTimeout.toJava private[this] val poll: F[Unit] = { def pollConsumer(state: State[F, K, V]): F[ConsumerRecords] = diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala index c39040351..290b53508 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala @@ -13,6 +13,7 @@ import fs2.kafka.admin.MkAdminClient import fs2.kafka.internal.syntax._ import org.apache.kafka.clients.admin.AdminClient import org.apache.kafka.common.KafkaFuture +import scala.jdk.DurationConverters._ private[kafka] sealed abstract class WithAdminClient[F[_]] { def apply[A](f: AdminClient => KafkaFuture[A]): F[A] @@ -32,7 +33,7 @@ private[kafka] object WithAdminClient { } val close = - F.blocking(adminClient.close(settings.closeTimeout.asJava)) + F.blocking(adminClient.close(settings.closeTimeout.toJava)) (withAdminClient, close) } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala index 6a073d32f..fe37f808b 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala @@ -10,7 +10,7 @@ import cats.effect.{Async, Resource} import cats.implicits._ import fs2.kafka.consumer.MkConsumer import fs2.kafka.{ConsumerSettings, KafkaByteConsumer} -import fs2.kafka.internal.syntax._ +import scala.jdk.DurationConverters._ private[kafka] sealed abstract class WithConsumer[F[_]] { def blocking[A](f: KafkaByteConsumer => A): F[A] @@ -34,7 +34,7 @@ private[kafka] object WithConsumer { b(f(consumer)) } } - }(_.blocking { _.close(settings.closeTimeout.asJava) }) + }(_.blocking { _.close(settings.closeTimeout.toJava) }) } } } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala index c0ee7aa64..12c8b9901 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala @@ -10,7 +10,7 @@ import fs2.kafka.producer.MkProducer import cats.effect.{Async, Resource} import cats.implicits._ import fs2.kafka.{KafkaByteProducer, ProducerSettings, TransactionalProducerSettings} -import fs2.kafka.internal.syntax._ +import scala.jdk.DurationConverters._ private[kafka] sealed abstract class WithProducer[F[_]] { def apply[A](f: (KafkaByteProducer, Blocking[F]) => F[A]): F[A] @@ -36,7 +36,7 @@ private[kafka] object WithProducer { Resource .make( 
mk(settings) - )(producer => blockingF { producer.close(settings.closeTimeout.asJava) }) + )(producer => blockingF { producer.close(settings.closeTimeout.toJava) }) .map(create(_, blockingG)) } @@ -56,7 +56,7 @@ private[kafka] object WithProducer { val initTransactions = withProducer.blocking { _.initTransactions() } val close = withProducer.blocking { - _.close(settings.producerSettings.closeTimeout.asJava) + _.close(settings.producerSettings.closeTimeout.toJava) } initTransactions.as((withProducer, close)) diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index 2bb8886c1..db6f6a715 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -11,16 +11,12 @@ import cats.effect.Async import cats.effect.syntax.all._ import cats.implicits._ import fs2.kafka.{Header, Headers, KafkaHeaders} -import fs2.kafka.internal.converters.unsafeWrapArray -import fs2.kafka.internal.converters.collection._ -import java.time.Duration -import java.time.temporal.ChronoUnit +import scala.jdk.CollectionConverters._ import java.util -import java.util.concurrent.{CancellationException, CompletionException, TimeUnit} +import java.util.concurrent.{CancellationException, CompletionException} import org.apache.kafka.common.KafkaFuture import org.apache.kafka.common.KafkaFuture.{BaseFunction, BiConsumer} -import scala.collection.immutable.SortedSet -import scala.concurrent.duration.FiniteDuration +import scala.collection.immutable.{ArraySeq, SortedSet} private[kafka] object syntax { implicit final class LoggingSyntax[F[_], A]( @@ -33,23 +29,6 @@ private[kafka] object syntax { fa.flatMap(a => logging.log(f(a))) } - implicit final class FiniteDurationSyntax( - private val duration: FiniteDuration - ) extends AnyVal { - def asJava: Duration = - if (duration.length == 0L) Duration.ZERO - else - duration.unit match { - case TimeUnit.DAYS => Duration.ofDays(duration.length) - case TimeUnit.HOURS => Duration.ofHours(duration.length) - case TimeUnit.MINUTES => Duration.ofMinutes(duration.length) - case TimeUnit.SECONDS => Duration.ofSeconds(duration.length) - case TimeUnit.MILLISECONDS => Duration.ofMillis(duration.length) - case TimeUnit.MICROSECONDS => Duration.of(duration.length, ChronoUnit.MICROS) - case TimeUnit.NANOSECONDS => Duration.ofNanos(duration.length) - } - } - implicit final class FoldableSyntax[F[_], A]( private val fa: F[A] ) extends AnyVal { @@ -221,7 +200,7 @@ private[kafka] object syntax { ) extends AnyVal { def asScala: Headers = Headers.fromSeq { - unsafeWrapArray { + ArraySeq.unsafeWrapArray { headers.toArray.map { header => Header(header.key, header.value) } diff --git a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala index ea5d07bd9..b9a8536a9 100644 --- a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala @@ -9,7 +9,7 @@ package fs2.kafka.producer import cats.effect.Sync import fs2.kafka.{KafkaByteProducer, ProducerSettings} import org.apache.kafka.common.serialization.ByteArraySerializer -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ /** * A capability trait representing the ability to instantiate the Java diff --git a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala 
b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala index 0519b70a1..4781cc211 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala @@ -27,7 +27,7 @@ This file contains code derived from the Embedded Kafka library package fs2.kafka import cats.effect.Sync -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import java.util.UUID import scala.util.Failure diff --git a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala index 2e377bfee..74c22fe2c 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala @@ -1,7 +1,7 @@ package fs2.kafka import cats.data.Chain -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ final class HeadersSpec extends BaseSpec { describe("Headers#empty") { diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala index d73805d08..024790150 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala @@ -8,7 +8,7 @@ import cats.implicits._ import cats.effect.unsafe.implicits.global import fs2.Stream import fs2.concurrent.SignallingRef -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.NoOffsetForPartitionException import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.errors.TimeoutException diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 50d21a8a3..922b35241 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -6,7 +6,7 @@ import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.implicits._ import fs2.{Chunk, Stream} -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import fs2.kafka.producer.MkProducer import org.apache.kafka.clients.consumer.{ConsumerConfig, OffsetAndMetadata} import org.apache.kafka.common.TopicPartition diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index 944f5aeda..df168010f 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -3,20 +3,9 @@ package fs2.kafka.internal import fs2.kafka._ import fs2.kafka.BaseSpec import fs2.kafka.internal.syntax._ -import java.time.temporal.ChronoUnit.MICROS import org.apache.kafka.common.header.internals.RecordHeaders -import scala.concurrent.duration._ final class SyntaxSpec extends BaseSpec { - describe("FiniteDuration#asJava") { - it("should convert days") { assert(1.day.asJava == java.time.Duration.ofDays(1)) } - it("should convert hours") { assert(1.hour.asJava == java.time.Duration.ofHours(1)) } - it("should convert minutes") { assert(1.minute.asJava == java.time.Duration.ofMinutes(1)) } - it("should convert seconds") { assert(1.second.asJava == java.time.Duration.ofSeconds(1)) } - it("should convert millis") { assert(1.milli.asJava == 
java.time.Duration.ofMillis(1)) } - it("should convert micros") { assert(1.micro.asJava == java.time.Duration.of(1, MICROS)) } - it("should convert nanos") { assert(1.nanos.asJava == java.time.Duration.ofNanos(1)) } - } describe("Map#filterKeysStrictValuesList") { it("should be the same as toList.collect") { diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala index 59a2bde0f..95904d1b3 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala @@ -8,7 +8,7 @@ package fs2.kafka.vulcan import cats.effect.Sync import cats.implicits._ -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ import fs2.kafka.internal.syntax._ /** diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala index e7365e0ee..b43b79c64 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala @@ -8,7 +8,7 @@ package fs2.kafka.vulcan import cats.effect.Sync import cats.Show -import fs2.kafka.internal.converters.collection._ +import scala.jdk.CollectionConverters._ /** * Describes how to create a `SchemaRegistryClient` and which From a882cad59dd40fea4953a5e27a3be966e799ffa4 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 17 Jul 2021 12:06:57 +0100 Subject: [PATCH 002/162] update github workflow --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19a4be006..b058e1cae 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.12.14, 2.13.6, 3.0.1] + scala: [2.13.6, 3.0.1] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: From 30a3c8c6152891dfc000dfb53d11cd26dbbd47f1 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 17 Jul 2021 12:42:58 +0100 Subject: [PATCH 003/162] scalafmt --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 006895565..b81415527 100644 --- a/build.sbt +++ b/build.sbt @@ -367,7 +367,7 @@ addCommandsAlias( List( "clean", "test", - // "mimaReportBinaryIssues", + // "mimaReportBinaryIssues", "scalafmtCheck", "scalafmtSbtCheck", "headerCheck", From 7c42007f818bae26694e5c67e50498a48b155e06 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 17 Oct 2021 21:33:14 +0100 Subject: [PATCH 004/162] Update to Kafka 3.0 --- build.sbt | 2 +- .../kafka/TransactionalKafkaProducer.scala | 5 ++-- .../scala/fs2/kafka/ConsumerRecordSpec.scala | 23 ++++++++++--------- .../scala/fs2/kafka/ProducerResultSpec.scala | 6 ++--- .../TransactionalKafkaProducerSpec.scala | 6 ++--- 5 files changed, 22 insertions(+), 20 deletions(-) diff --git a/build.sbt b/build.sbt index a946da54b..d418f247f 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "6.2.1" val fs2Version = "3.1.0" -val kafkaVersion = "2.8.1" +val kafkaVersion = "3.0.0" val testcontainersScalaVersion = "0.39.8" diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 6cebadf25..3f2545ac9 100644 --- 
a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -7,12 +7,13 @@ package fs2.kafka import cats.effect.syntax.all._ -import cats.effect.{Async, Resource, Outcome} +import cats.effect.{Async, Outcome, Resource} import cats.syntax.all._ import fs2.kafka.internal._ import scala.jdk.CollectionConverters._ import fs2.kafka.producer.MkProducer import fs2.{Chunk, Stream} +import org.apache.kafka.clients.consumer.ConsumerGroupMetadata import org.apache.kafka.clients.producer.RecordMetadata import org.apache.kafka.common.{Metric, MetricName} @@ -113,7 +114,7 @@ object TransactionalKafkaProducer { blocking { producer.sendOffsetsToTransaction( batch.offsets.asJava, - groupId + new ConsumerGroupMetadata(groupId) ) } } diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala index e60a0d46d..d06deee3a 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala @@ -7,6 +7,7 @@ import org.apache.kafka.clients.consumer.ConsumerRecord.{NULL_SIZE, NO_TIMESTAMP import org.apache.kafka.common.record.TimestampType import org.apache.kafka.common.record.TimestampType._ import org.scalatest._ +import scala.jdk.OptionConverters._ final class ConsumerRecordSpec extends BaseSpec { describe("ConsumerRecord#fromJava") { @@ -18,14 +19,15 @@ final class ConsumerRecordSpec extends BaseSpec { new KafkaByteConsumerRecord( "topic", 0, - 1, + 1L, timestamp, timestampType, - 2, 3, 4, "key".getBytes, - "value".getBytes + "value".getBytes, + Headers.empty.asJava, + none[Integer].toJava ) f( @@ -55,11 +57,12 @@ final class ConsumerRecordSpec extends BaseSpec { 1, NO_TIMESTAMP, NO_TIMESTAMP_TYPE, - 2, serializedKeySize, 4, "key".getBytes, - "value".getBytes + "value".getBytes, + Headers.empty.asJava, + none[Integer].toJava ) f( @@ -84,11 +87,12 @@ final class ConsumerRecordSpec extends BaseSpec { 1, NO_TIMESTAMP, NO_TIMESTAMP_TYPE, - 2, 3, serializedValueSize, "key".getBytes, - "value".getBytes + "value".getBytes, + Headers.empty.asJava, + none[Integer].toJava ) f( @@ -113,15 +117,12 @@ final class ConsumerRecordSpec extends BaseSpec { 1, NO_TIMESTAMP, NO_TIMESTAMP_TYPE, - 2, 3, 4, "key".getBytes, "value".getBytes, Headers.empty.asJava, - if (leaderEpoch.nonEmpty) - java.util.Optional.of[java.lang.Integer](leaderEpoch.get) - else java.util.Optional.empty() + leaderEpoch.map(i => i: Integer).toJava ) f( diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala index 46566080a..42bc760b7 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala @@ -21,7 +21,7 @@ final class ProducerResultSpec extends BaseSpec { .withPartition(1) .withTimestamp(0L) .withHeaders(Headers(Header("key", Array[Byte]()))) -> - new RecordMetadata(new TopicPartition("topic", 0), 0L, 0L, 0L, 0L, 0, 0) + new RecordMetadata(new TopicPartition("topic", 0), 0L, 0, 0L, 0, 0) ) assert { @@ -32,9 +32,9 @@ final class ProducerResultSpec extends BaseSpec { val two: Chunk[(ProducerRecord[String, String], RecordMetadata)] = Chunk( ProducerRecord("topic", "key", "value").withPartition(0).withTimestamp(0L) -> - new RecordMetadata(new TopicPartition("topic", 0), 0L, 0L, 0L, 0L, 0, 0), + new RecordMetadata(new TopicPartition("topic", 0), 
0L, 0, 0L, 0, 0), ProducerRecord("topic", "key", "value").withPartition(1).withTimestamp(0L) -> - new RecordMetadata(new TopicPartition("topic", 1), 0L, 0L, 0L, 0L, 0, 0) + new RecordMetadata(new TopicPartition("topic", 1), 0L, 0, 0L, 0, 0) ) assert { diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 922b35241..2883ec3c3 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -8,7 +8,7 @@ import cats.implicits._ import fs2.{Chunk, Stream} import scala.jdk.CollectionConverters._ import fs2.kafka.producer.MkProducer -import org.apache.kafka.clients.consumer.{ConsumerConfig, OffsetAndMetadata} +import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerGroupMetadata, OffsetAndMetadata} import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.errors.InvalidProducerEpochException import org.apache.kafka.common.serialization.ByteArraySerializer @@ -169,12 +169,12 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { ) { override def sendOffsetsToTransaction( offsets: util.Map[TopicPartition, OffsetAndMetadata], - consumerGroupId: String + groupMetadata: ConsumerGroupMetadata ): Unit = if (offsets.containsKey(new TopicPartition(topic, 2))) { throw error } else { - super.sendOffsetsToTransaction(offsets, consumerGroupId) + super.sendOffsetsToTransaction(offsets, groupMetadata) } } } From ee73cdf99693ebc67a08228003a144f47638f508 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 17 Oct 2021 21:40:47 +0100 Subject: [PATCH 005/162] Remove custom KafkaFuture cancelable implementation --- .../scala/fs2/kafka/internal/syntax.scala | 26 +++---------------- 1 file changed, 3 insertions(+), 23 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index db6f6a715..3fbe304df 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -8,14 +8,12 @@ package fs2.kafka.internal import cats.{FlatMap, Foldable, Show} import cats.effect.Async -import cats.effect.syntax.all._ import cats.implicits._ import fs2.kafka.{Header, Headers, KafkaHeaders} import scala.jdk.CollectionConverters._ import java.util -import java.util.concurrent.{CancellationException, CompletionException} import org.apache.kafka.common.KafkaFuture -import org.apache.kafka.common.KafkaFuture.{BaseFunction, BiConsumer} +import org.apache.kafka.common.KafkaFuture.BaseFunction import scala.collection.immutable.{ArraySeq, SortedSet} private[kafka] object syntax { @@ -165,8 +163,7 @@ private[kafka] object syntax { implicit final class KafkaFutureSyntax[A]( private val future: KafkaFuture[A] ) extends AnyVal { - private[this] def baseFunction[B](f: A => B): BaseFunction[A, B] = - new BaseFunction[A, B] { override def apply(a: A): B = f(a) } + private[this] def baseFunction[B](f: A => B): BaseFunction[A, B] = f(_) def map[B](f: A => B): KafkaFuture[B] = future.thenApply(baseFunction(f)) @@ -174,25 +171,8 @@ private[kafka] object syntax { def void: KafkaFuture[Unit] = map(_ => ()) - def cancelToken[F[_]](implicit F: Async[F]): F[Option[F[Unit]]] = - F.blocking { future.cancel(true); () }.start.map(_.cancel.some) - - // Inspired by Monix's `CancelableFuture#fromJavaCompletable`. 
def cancelable[F[_]](implicit F: Async[F]): F[A] = - F.async { (cb: (Either[Throwable, A] => Unit)) => - F.blocking { - future - .whenComplete(new BiConsumer[A, Throwable] { - override def accept(a: A, t: Throwable): Unit = t match { - case null => cb(a.asRight) - case _: CancellationException => () - case e: CompletionException if e.getCause != null => cb(e.getCause.asLeft) - case e => cb(e.asLeft) - } - }) - } - .flatMap(_.cancelToken) - } + F.fromCompletableFuture(F.delay(future.toCompletionStage.toCompletableFuture)) } implicit final class KafkaHeadersSyntax( From a01644012c1a9336124de913481dae30915b059c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 19 Oct 2021 14:29:28 +0200 Subject: [PATCH 006/162] Update fs2-core to 3.1.6 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a946da54b..12c4dcac5 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ val catsVersion = "2.6.1" val confluentVersion = "6.2.1" -val fs2Version = "3.1.0" +val fs2Version = "3.1.6" val kafkaVersion = "2.8.1" From 9c94f1317d3a1b4634660da6ec45531848f04ae5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 19 Oct 2021 14:29:38 +0200 Subject: [PATCH 007/162] Update sbt-ci-release to 1.5.10 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 63f52236e..9bab9cad7 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.23") From 8b48c438c1f8535218c3a36005a5714f9f78e71e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 19 Oct 2021 14:29:59 +0200 Subject: [PATCH 008/162] Update scala3-library to 3.1.0 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a946da54b..8b7a67b09 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ val munitVersion = "0.7.29" val scala2 = "2.13.6" -val scala3 = "3.0.2" +val scala3 = "3.1.0" lazy val `fs2-kafka` = project .in(file(".")) From 184d1f6e5103633e674a9198a5b95aa918d5d5c9 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 19 Oct 2021 14:30:31 +0200 Subject: [PATCH 009/162] Regenerate workflow with sbt-github-actions --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3080466f9..bd4293a94 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6, 3.0.2] + scala: [2.13.6, 3.1.0] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: From f118ad4ac11f28ab529487feea608c64869c0d17 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 20 Oct 2021 14:19:00 +0200 Subject: [PATCH 010/162] Update sbt-mdoc to 2.2.24 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 63f52236e..9f82d7122 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,6 +3,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") 
addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.23") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.13.0") From 25b2c339939f2c9fe6f8019f741695aee25e1797 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 24 Oct 2021 02:20:43 +0200 Subject: [PATCH 011/162] Update sbt-unidoc to 0.5.0 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 63f52236e..4f61a0ccf 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,5 +1,5 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") -addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") +addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") From c87f323c5458f208fb069211295f966a62e24980 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 2 Nov 2021 10:51:44 +0100 Subject: [PATCH 012/162] Update scala-library to 2.13.7 in series/3.x --- .github/workflows/ci.yml | 6 +++--- build.sbt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3080466f9..224e38190 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6, 3.0.2] + scala: [2.13.7, 3.0.2] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: @@ -54,7 +54,7 @@ jobs: - run: sbt ++${{ matrix.scala }} ci - - if: matrix.scala == '2.13.6' + - if: matrix.scala == '2.13.7' run: sbt ++${{ matrix.scala }} docs/run publish: @@ -64,7 +64,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6] + scala: [2.13.7] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: diff --git a/build.sbt b/build.sbt index a946da54b..b5cf61f48 100644 --- a/build.sbt +++ b/build.sbt @@ -14,7 +14,7 @@ val vulcanVersion = "1.7.1" val munitVersion = "0.7.29" -val scala2 = "2.13.6" +val scala2 = "2.13.7" val scala3 = "3.0.2" From 9f3dd8bef8ad037edf7442d3ebd52b2c886272e6 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 14 Nov 2021 22:21:08 +0100 Subject: [PATCH 013/162] Update testcontainers-scala-kafka, ... 
to 0.39.12 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a946da54b..1453464e4 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val fs2Version = "3.1.0" val kafkaVersion = "2.8.1" -val testcontainersScalaVersion = "0.39.8" +val testcontainersScalaVersion = "0.39.12" val vulcanVersion = "1.7.1" From 63389b50ac8d902a734e4e7e3f54d580eda1547c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 10 Dec 2021 02:25:19 +0100 Subject: [PATCH 014/162] Update kafka-avro-serializer to 6.2.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a946da54b..a2066a435 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.2.9" val catsVersion = "2.6.1" -val confluentVersion = "6.2.1" +val confluentVersion = "6.2.2" val fs2Version = "3.1.0" From 00f2bf6d5953d7a9f88d0874facb10de806556bd Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 11 Dec 2021 06:41:10 +0100 Subject: [PATCH 015/162] Update sbt to 1.5.6 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 7a7e80d6d..da9d652bf 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.5.5 +sbt.version = 1.5.6 From e48fa963319e04fd0b967e0edd422fdbce200d10 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 14 Dec 2021 14:33:44 +0100 Subject: [PATCH 016/162] Update logback-classic to 1.2.8 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a946da54b..329c2e7dd 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val dependencySettings = Seq( "org.typelevel" %% "discipline-scalatest" % "2.1.5", "org.typelevel" %% "cats-effect-laws" % catsEffectVersion, "org.typelevel" %% "cats-effect-testkit" % catsEffectVersion, - "ch.qos.logback" % "logback-classic" % "1.2.6" + "ch.qos.logback" % "logback-classic" % "1.2.8" ).map(_ % Test), libraryDependencies ++= { if (scalaVersion.value.startsWith("3")) Nil From c7af691b4e8dcc3454b409fbcee39cdfb220d40f Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 15 Dec 2021 10:03:14 +0100 Subject: [PATCH 017/162] Update sbt to 1.5.7 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index da9d652bf..8378cad58 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.5.6 +sbt.version = 1.5.7 From 0f5e2318bf970694e06f598997c87502cc695bfe Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 17 Dec 2021 06:24:35 +0100 Subject: [PATCH 018/162] Update logback-classic to 1.2.9 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 329c2e7dd..0fa630539 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val dependencySettings = Seq( "org.typelevel" %% "discipline-scalatest" % "2.1.5", "org.typelevel" %% "cats-effect-laws" % catsEffectVersion, "org.typelevel" %% "cats-effect-testkit" % catsEffectVersion, - "ch.qos.logback" % "logback-classic" % "1.2.8" + "ch.qos.logback" % "logback-classic" % "1.2.9" ).map(_ % Test), libraryDependencies ++= { if (scalaVersion.value.startsWith("3")) Nil From 664b22e9152396cbcc74b898e2a2a48c797a991a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 21 
Dec 2021 03:33:18 +0100 Subject: [PATCH 019/162] Update sbt to 1.5.8 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 8378cad58..f4f743cc4 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.5.7 +sbt.version = 1.5.8 From 33f8087896537fe45c448e474cc54cb9d39c6804 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 23 Dec 2021 17:02:26 +0100 Subject: [PATCH 020/162] Update logback-classic to 1.2.10 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0fa630539..fd1ff8a2d 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val dependencySettings = Seq( "org.typelevel" %% "discipline-scalatest" % "2.1.5", "org.typelevel" %% "cats-effect-laws" % catsEffectVersion, "org.typelevel" %% "cats-effect-testkit" % catsEffectVersion, - "ch.qos.logback" % "logback-classic" % "1.2.9" + "ch.qos.logback" % "logback-classic" % "1.2.10" ).map(_ % Test), libraryDependencies ++= { if (scalaVersion.value.startsWith("3")) Nil From d6bb7760c23279ca1b236aa31f71b19d26150eda Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 25 Dec 2021 07:07:09 +0100 Subject: [PATCH 021/162] Update sbt-scalafmt to 2.4.6 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 63f52236e..820e90df4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,5 +4,5 @@ addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.23") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.13.0") From 2120b9125e276a717c19f5d62981897678c1dffe Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 29 Dec 2021 13:22:16 +0100 Subject: [PATCH 022/162] Update sbt to 1.6.1 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f4f743cc4..dd4ff4368 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.5.8 +sbt.version = 1.6.1 From 94fc4ef15ef56e9c82deaf65b6a2df259bb113b7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 1 Jan 2022 09:49:29 +0100 Subject: [PATCH 023/162] Update cats-effect, cats-effect-laws, ... 
to 3.3.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index fd1ff8a2d..88ef22538 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.2.9" +val catsEffectVersion = "3.3.3" val catsVersion = "2.6.1" From 158013c3b52fb8fc8344d3237a612aeb3c868132 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 2 Jan 2022 18:22:00 +0100 Subject: [PATCH 024/162] Update fs2-core to 3.2.4 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index c19838673..970757fa9 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ val catsVersion = "2.6.1" val confluentVersion = "6.2.2" -val fs2Version = "3.1.6" +val fs2Version = "3.2.4" val kafkaVersion = "2.8.1" From a6471bdc708c6dd5db0221b4999bec2b090ea2cc Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 2 Jan 2022 18:22:12 +0100 Subject: [PATCH 025/162] Update kafka-avro-serializer to 7.0.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index c19838673..31baed119 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.3" val catsVersion = "2.6.1" -val confluentVersion = "6.2.2" +val confluentVersion = "7.0.1" val fs2Version = "3.1.6" From dc5bf2e22c40a58a94d595a56137f00024eda624 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 2 Jan 2022 17:58:29 +0000 Subject: [PATCH 026/162] Revert "Update kafka-avro-serializer to 7.0.1 in series/3.x" --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index ad1a39aaa..970757fa9 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.3" val catsVersion = "2.6.1" -val confluentVersion = "7.0.1" +val confluentVersion = "6.2.2" val fs2Version = "3.2.4" From 273c968b5c767774b8f84b366c3bdbaa3443eeed Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 8 Jan 2022 17:21:02 +0000 Subject: [PATCH 027/162] Fix test --- .../src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala index 2283f9aa5..5239f5322 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala @@ -160,10 +160,11 @@ final class KafkaAdminClientSpec extends BaseKafkaSpec { Map(cr -> List(new AlterConfigOp(ce, AlterConfigOp.OpType.SET))) }.attempt _ <- IO(assert(alteredConfigs.isRight)) - describedConfigs <- adminClient.describeConfigs(List(cr)).attempt + describedConfigs <- adminClient.describeConfigs(List(cr)) _ <- IO( assert( - describedConfigs.toOption.flatMap(_.get(cr)).map(_.contains(ce)).getOrElse(false) + describedConfigs(cr) + .exists(actual => actual.name == ce.name && actual.value == ce.value) ) ) } yield () @@ -225,7 +226,7 @@ final class KafkaAdminClientSpec extends BaseKafkaSpec { describedTopics <- adminClient.describeTopics(topic :: Nil) _ <- IO(assert(describedTopics.size == 1)) _ <- IO( - assert(describedTopics.headOption.map(_._2.partitions.size == 4).getOrElse(false)) + assert(describedTopics.headOption.exists(_._2.partitions.size == 4)) ) deleteTopics <- adminClient .deleteTopics(List(topic)) From 3d66362e6a50e9d8336d8b25ad8bbb8ff4b527c6 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 8 Jan 2022 17:24:46 +0000 
Subject: [PATCH 028/162] Update confluent to 7.0.1 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 3b2b674e3..f836f37a3 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.3" val catsVersion = "2.6.1" -val confluentVersion = "6.2.2" +val confluentVersion = "7.0.1" val fs2Version = "3.2.4" From e643017b23ab435caee927e8257ee84216e898bf Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 9 Jan 2022 03:18:06 +0100 Subject: [PATCH 029/162] Update cats-effect, cats-effect-laws, ... to 3.3.4 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f9fb0ee70..647d49a81 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.3.3" +val catsEffectVersion = "3.3.4" val catsVersion = "2.6.1" From ff693452104801cf759d82d42b46891c188bbab7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 13 Jan 2022 07:54:31 +0100 Subject: [PATCH 030/162] Update scala-library to 2.13.8 in series/3.x --- .github/workflows/ci.yml | 6 +++--- build.sbt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ec42bdf5f..d7a5887b0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.7, 3.1.0] + scala: [2.13.8, 3.1.0] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: @@ -54,7 +54,7 @@ jobs: - run: sbt ++${{ matrix.scala }} ci - - if: matrix.scala == '2.13.7' + - if: matrix.scala == '2.13.8' run: sbt ++${{ matrix.scala }} docs/run publish: @@ -64,7 +64,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.7] + scala: [2.13.8] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: diff --git a/build.sbt b/build.sbt index 647d49a81..481489958 100644 --- a/build.sbt +++ b/build.sbt @@ -14,7 +14,7 @@ val vulcanVersion = "1.7.1" val munitVersion = "0.7.29" -val scala2 = "2.13.7" +val scala2 = "2.13.8" val scala3 = "3.1.0" From 5bc9a7e325338c7e2ca120aba1c739d138a1633d Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 22 Jan 2022 00:15:56 +0100 Subject: [PATCH 031/162] Update kafka-clients to 3.1.0 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0be1d7bd5..760df7a84 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.0.1" val fs2Version = "3.2.4" -val kafkaVersion = "3.0.0" +val kafkaVersion = "3.1.0" val testcontainersScalaVersion = "0.39.12" From 44253bb932d8ed2d19b31738cd8a8f11868f4122 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 1 Feb 2022 15:45:32 +0100 Subject: [PATCH 032/162] Update sbt to 1.6.2 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index dd4ff4368..f6acff8b3 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.6.1 +sbt.version = 1.6.2 From f672900d6da828c577dad6d9b5de38554bc30d81 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 1 Feb 2022 23:29:51 +0100 Subject: [PATCH 033/162] Update scala3-library to 3.1.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0be1d7bd5..04d87f3fe 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ val munitVersion = "0.7.29" val scala2 = "2.13.8" -val scala3 = "3.1.0" +val 
scala3 = "3.1.1" lazy val `fs2-kafka` = project .in(file(".")) From fe69d670a3fb6d0a214a13648789519d83846d18 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 1 Feb 2022 23:30:32 +0100 Subject: [PATCH 034/162] Regenerate workflow with sbt-github-actions --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d7a5887b0..a0ad6377c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.1.0] + scala: [2.13.8, 3.1.1] java: [adopt@1.8] runs-on: ${{ matrix.os }} steps: From b60b4a6955c39708f9864ec7372acacfc046a256 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 7 Feb 2022 12:21:45 +0100 Subject: [PATCH 035/162] Update sbt-header to 5.6.5 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 24e487c10..86e94c8f7 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.13.0") From 83a09b944d3cfde8a1297f1815a322b85516b6c0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 17 Feb 2022 22:27:14 +0100 Subject: [PATCH 036/162] Update sbt-buildinfo to 0.11.0 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 24e487c10..5ed78e1cc 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,4 +1,4 @@ -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") From a7e7f7ee167553efc46bb2c30625b364980392ac Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 19 Feb 2022 20:27:17 +0100 Subject: [PATCH 037/162] Update fs2-core to 3.2.5 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0be1d7bd5..7863bd157 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ val catsVersion = "2.6.1" val confluentVersion = "7.0.1" -val fs2Version = "3.2.4" +val fs2Version = "3.2.5" val kafkaVersion = "3.0.0" From a553e6e208d8795fd9f54472e7735af9974ccf02 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 28 Feb 2022 07:26:40 +0100 Subject: [PATCH 038/162] Update cats-effect, cats-effect-laws, ... 
to 3.3.6 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 550bb4b6b..ec93cdbc3 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.3.4" +val catsEffectVersion = "3.3.6" val catsVersion = "2.6.1" From 9eec2426e4bb958c155c665a25ce9aef98f45ea3 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 9 Mar 2022 16:43:57 +0000 Subject: [PATCH 039/162] Don't publish docs from 3.x branch --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a0ad6377c..7f3633c78 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,4 +96,4 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt ++${{ matrix.scala }} ci-release docs/docusaurusPublishGhpages + run: sbt ++${{ matrix.scala }} ci-release # docs/docusaurusPublishGhpages From 05b838b3936db353d45f958076aa8b82f8d8484f Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 9 Mar 2022 16:47:28 +0000 Subject: [PATCH 040/162] Fix workflow --- .github/workflows/ci.yml | 2 +- build.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7f3633c78..eb97776d3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -96,4 +96,4 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt ++${{ matrix.scala }} ci-release # docs/docusaurusPublishGhpages + run: sbt ++${{ matrix.scala }} ci-release diff --git a/build.sbt b/build.sbt index 0ff3ee8a3..dd70a6b07 100644 --- a/build.sbt +++ b/build.sbt @@ -212,7 +212,7 @@ ThisBuild / githubWorkflowPublishTargetBranches := ThisBuild / githubWorkflowPublish := Seq( WorkflowStep.Sbt( - List("ci-release", "docs/docusaurusPublishGhpages"), + List("ci-release"), // For 3.0 release: List("ci-release", "docs/docusaurusPublishGhpages"), env = Map( "GIT_DEPLOY_KEY" -> "${{ secrets.GIT_DEPLOY_KEY }}", "PGP_PASSPHRASE" -> "${{ secrets.PGP_PASSPHRASE }}", From d5a1b5f3f958e3e74f5f0d15f55b79d3106da876 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 9 Mar 2022 17:00:19 +0000 Subject: [PATCH 041/162] Fix deprecation --- modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index d188daea3..4ea1daaef 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -313,7 +313,7 @@ object KafkaAdminClient { withAdminClient: WithAdminClient[F], topics: G[String] )(implicit G: Foldable[G]): F[Map[String, TopicDescription]] = - withAdminClient(_.describeTopics(topics.asJava).all.map(_.toMap)) + withAdminClient(_.describeTopics(topics.asJava).allTopicNames.map(_.toMap)) private[this] def describeAclsWith[F[_]]( withAdminClient: WithAdminClient[F], From b8647171fed71af79c098e1db456be76a4f91d2c Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:07:07 +0000 Subject: [PATCH 042/162] Add parameter to Serializer to indicate key or value --- .gitignore | 2 ++ .../main/scala/fs2/kafka/KafkaProducer.scala | 16 ++++++------ 
.../fs2/kafka/KafkaProducerConnection.scala | 9 ++++--- .../scala/fs2/kafka/ProducerSettings.scala | 20 +++++++-------- .../scala/fs2/kafka/RecordSerializer.scala | 21 ++++++++-------- .../src/main/scala/fs2/kafka/Serializer.scala | 25 ++++++++++++------- .../src/main/scala/fs2/kafka/package.scala | 5 ++++ .../fs2/kafka/vulcan/AvroSerializer.scala | 4 +-- 8 files changed, 59 insertions(+), 43 deletions(-) diff --git a/.gitignore b/.gitignore index f7f4fa41e..8fc2c65b6 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ target/ .metals/ .vscode/ +.bloop/ +metals.sbt \ No newline at end of file diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index c1a9d7fad..e51cdccf1 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -141,8 +141,8 @@ object KafkaProducer { private[kafka] def from[F[_]: Async, K, V]( withProducer: WithProducer[F], - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V] + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V] ): KafkaProducer.Metrics[F, K, V] = new KafkaProducer.Metrics[F, K, V] { override def produce[P]( @@ -178,8 +178,8 @@ object KafkaProducer { Stream.resource(KafkaProducer.resource(settings)) private[kafka] def produceRecord[F[_], K, V]( - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V], + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V], producer: KafkaByteProducer, blocking: Blocking[F] )( @@ -225,8 +225,8 @@ object KafkaProducer { _.evalMap(producer.produce).mapAsync(settings.parallelism)(identity) private[this] def serializeToBytes[F[_], K, V]( - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V], + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V], record: ProducerRecord[K, V] )(implicit F: Apply[F]): F[(Array[Byte], Array[Byte])] = { val keyBytes = @@ -239,8 +239,8 @@ object KafkaProducer { } private[this] def asJavaRecord[F[_], K, V]( - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V], + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V], record: ProducerRecord[K, V] )(implicit F: Apply[F]): F[KafkaByteProducerRecord] = serializeToBytes(keySerializer, valueSerializer, record).map { diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index 5d1243364..9c656bb9b 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -30,8 +30,8 @@ sealed abstract class KafkaProducerConnection[F[_]] { * }}} */ def withSerializers[K, V]( - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V] + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V] ): KafkaProducer.Metrics[F, K, V] /** @@ -106,8 +106,8 @@ object KafkaProducerConnection { WithProducer(mk, settings).map { withProducer => new KafkaProducerConnection[G] { override def withSerializers[K, V]( - keySerializer: Serializer[G, K], - valueSerializer: Serializer[G, V] + keySerializer: KeySerializer[G, K], + valueSerializer: ValueSerializer[G, V] ): KafkaProducer.Metrics[G, K, V] = KafkaProducer.from(withProducer, keySerializer, valueSerializer) @@ -115,6 +115,7 @@ object KafkaProducerConnection { settings: ProducerSettings[G, K, V] ): 
G[KafkaProducer.Metrics[G, K, V]] = (settings.keySerializer, settings.valueSerializer).mapN(withSerializers) + } } diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index fd7e30b14..b69d36ef1 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -31,12 +31,12 @@ sealed abstract class ProducerSettings[F[_], K, V] { /** * The `Serializer` to use for serializing record keys. */ - def keySerializer: F[Serializer[F, K]] + def keySerializer: F[KeySerializer[F, K]] /** * The `Serializer` to use for serializing record values. */ - def valueSerializer: F[Serializer[F, V]] + def valueSerializer: F[ValueSerializer[F, V]] /** * A custom [[ExecutionContext]] to use for blocking Kafka operations. @@ -235,8 +235,8 @@ sealed abstract class ProducerSettings[F[_], K, V] { object ProducerSettings { private[this] final case class ProducerSettingsImpl[F[_], K, V]( - override val keySerializer: F[Serializer[F, K]], - override val valueSerializer: F[Serializer[F, V]], + override val keySerializer: F[KeySerializer[F, K]], + override val valueSerializer: F[ValueSerializer[F, V]], override val customBlockingContext: Option[ExecutionContext], override val properties: Map[String, String], override val closeTimeout: FiniteDuration, @@ -312,8 +312,8 @@ object ProducerSettings { } private[this] def create[F[_], K, V]( - keySerializer: F[Serializer[F, K]], - valueSerializer: F[Serializer[F, V]] + keySerializer: F[KeySerializer[F, K]], + valueSerializer: F[ValueSerializer[F, V]] ): ProducerSettings[F, K, V] = ProducerSettingsImpl( keySerializer = keySerializer, @@ -327,8 +327,8 @@ object ProducerSettings { ) def apply[F[_], K, V]( - keySerializer: Serializer[F, K], - valueSerializer: Serializer[F, V] + keySerializer: KeySerializer[F, K], + valueSerializer: ValueSerializer[F, V] )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = create( keySerializer = F.pure(keySerializer), @@ -337,7 +337,7 @@ object ProducerSettings { def apply[F[_], K, V]( keySerializer: RecordSerializer[F, K], - valueSerializer: Serializer[F, V] + valueSerializer: ValueSerializer[F, V] )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = create( keySerializer = keySerializer.forKey, @@ -345,7 +345,7 @@ object ProducerSettings { ) def apply[F[_], K, V]( - keySerializer: Serializer[F, K], + keySerializer: KeySerializer[F, K], valueSerializer: RecordSerializer[F, V] )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = create( diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala index 4145b145b..e961132cd 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala @@ -6,7 +6,8 @@ package fs2.kafka -import cats.Applicative +import cats._ +import cats.syntax.all._ /** * Serializer which may vary depending on whether a record @@ -14,9 +15,9 @@ import cats.Applicative * a creation effect. 
*/ sealed abstract class RecordSerializer[F[_], A] { - def forKey: F[Serializer[F, A]] + def forKey: F[KeySerializer[F, A]] - def forValue: F[Serializer[F, A]] + def forValue: F[ValueSerializer[F, A]] } object RecordSerializer { @@ -25,26 +26,26 @@ object RecordSerializer { ): RecordSerializer[F, A] = serializer - def const[F[_], A]( + def const[F[_]: Functor, A]( serializer: => F[Serializer[F, A]] ): RecordSerializer[F, A] = RecordSerializer.instance( - forKey = serializer, - forValue = serializer + forKey = serializer.widen, + forValue = serializer.widen ) def instance[F[_], A]( - forKey: => F[Serializer[F, A]], - forValue: => F[Serializer[F, A]] + forKey: => F[KeySerializer[F, A]], + forValue: => F[ValueSerializer[F, A]] ): RecordSerializer[F, A] = { def _forKey = forKey def _forValue = forValue new RecordSerializer[F, A] { - override def forKey: F[Serializer[F, A]] = + override def forKey: F[KeySerializer[F, A]] = _forKey - override def forValue: F[Serializer[F, A]] = + override def forValue: F[ValueSerializer[F, A]] = _forValue override def toString: String = diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 36eb1333b..d66410aee 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -12,12 +12,14 @@ import cats.syntax.all._ import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID -/** - * Functional composable Kafka key- and record serializer with - * support for effect types. - */ -sealed abstract class Serializer[F[_], A] { +sealed trait SerdeType +object SerdeType { + sealed trait Key extends SerdeType + sealed trait Value extends SerdeType + sealed trait KeyOrValue extends Key with Value +} +sealed abstract class GenSerializer[+T <: SerdeType, F[_], A] { /** * Attempts to serialize the specified value of type `A` into * bytes. The Kafka topic name, to which the serialized bytes @@ -30,19 +32,19 @@ sealed abstract class Serializer[F[_], A] { * function `f` on a value of type `B`, and then serializes * the result with this [[Serializer]]. */ - def contramap[B](f: B => A): Serializer[F, B] + def contramap[B](f: B => A): GenSerializer[T, F, B] /** * Creates a new [[Serializer]] which applies the specified * function `f` on the output bytes of this [[Serializer]]. */ - def mapBytes(f: Array[Byte] => Array[Byte]): Serializer[F, A] + def mapBytes(f: Array[Byte] => Array[Byte]): GenSerializer[T, F, A] /** * Creates a new [[Serializer]] which serializes `Some` values * using this [[Serializer]], and serializes `None` as `null`. */ - def option: Serializer[F, Option[A]] + def option: GenSerializer[T, F, Option[A]] /** * Creates a new [[Serializer]] which suspends serialization, @@ -51,7 +53,12 @@ sealed abstract class Serializer[F[_], A] { def suspend: Serializer[F, A] } -object Serializer { +/** + * Functional composable Kafka key- and record serializer with + * support for effect types. + */ +object GenSerializer { + def apply[F[_], A](implicit serializer: Serializer[F, A]): Serializer[F, A] = serializer /** Alias for [[Serializer#identity]]. 
*/ diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 6109c6df7..5ce57b0e4 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -58,4 +58,9 @@ package object kafka { implicit F: Temporal[F] ): Pipe[F, CommittableOffset[F], Unit] = _.groupWithin(n, d).evalMap(CommittableOffsetBatch.fromFoldable(_).commit) + + type Serializer[F[_], A] = GenSerializer[SerdeType.KeyOrValue, F, A] + type KeySerializer[F[_], A] = GenSerializer[SerdeType.Key, F, A] + type ValueSerializer[F[_], A] = GenSerializer[SerdeType.Value, F, A] + val Serializer: GenSerializer.type = GenSerializer } diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index 568e1c251..e0458e98d 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -31,8 +31,8 @@ final class AvroSerializer[A] private[vulcan] ( } RecordSerializer.instance( - forKey = createSerializer(true), - forValue = createSerializer(false) + forKey = createSerializer(true).widen, + forValue = createSerializer(false).widen ) } From 692f2a03adf4df113a8b4b4a459e64bd8d46efc3 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:14:08 +0000 Subject: [PATCH 043/162] More parametric stuff in Serializer --- .../src/main/scala/fs2/kafka/Serializer.scala | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index d66410aee..0ca951724 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -180,10 +180,10 @@ object GenSerializer { * [[Serializer]]s depending on the Kafka topic name to * which the bytes are going to be sent. */ - def topic[F[_], A]( - f: PartialFunction[String, Serializer[F, A]] - )(implicit F: Sync[F]): Serializer[F, A] = - Serializer.instance { (topic, headers, a) => + def topic[T >: SerdeType.KeyOrValue <: SerdeType, F[_], A]( + f: PartialFunction[String, GenSerializer[T, F, A]] + )(implicit F: Sync[F]): GenSerializer[T, F, A] = + Serializer.instance[F, A] { (topic, headers, a) => f.applyOrElse(topic, unexpectedTopic) .serialize(topic, headers, a) } @@ -215,14 +215,14 @@ object GenSerializer { * The option [[Serializer]] serializes `None` as `null`, and * serializes `Some` values using the serializer for type `A`. 
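With the key/value split introduced here, a plain `Serializer` covers both the key and value cases, so it still satisfies either specialized position. A minimal sketch, assuming the implicit instances from the `Serializer` companion and a placeholder bootstrap server:

```scala
import cats.effect.IO
import fs2.kafka._

// A key-or-value serializer can be used wherever a KeySerializer or
// ValueSerializer is expected.
val keySerializer: KeySerializer[IO, String] = Serializer[IO, String]
val valueSerializer: ValueSerializer[IO, String] = Serializer[IO, String]

// Placeholder settings, only to show that the two positions are now tracked
// separately in the types.
val producerSettings: ProducerSettings[IO, String, String] =
  ProducerSettings(keySerializer, valueSerializer)
    .withBootstrapServers("localhost:9092")
```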
*/ - implicit def option[F[_], A]( - implicit serializer: Serializer[F, A] - ): Serializer[F, Option[A]] = + implicit def option[T <: SerdeType, F[_], A]( + implicit serializer: GenSerializer[T, F, A] + ): GenSerializer[T, F, Option[A]] = serializer.option - implicit def contravariant[F[_]]: Contravariant[Serializer[F, *]] = - new Contravariant[Serializer[F, *]] { - override def contramap[A, B](serializer: Serializer[F, A])(f: B => A): Serializer[F, B] = + implicit def contravariant[T <: SerdeType, F[_]]: Contravariant[GenSerializer[T, F, *]] = + new Contravariant[GenSerializer[T, F, *]] { + override def contramap[A, B](serializer: GenSerializer[T, F, A])(f: B => A): GenSerializer[T, F, B] = serializer.contramap(f) } From 6a9868b2fa65408e252fac6ce18016b074de8571 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:33:42 +0000 Subject: [PATCH 044/162] Specialize Deserializer types for Key or Value --- .../main/scala/fs2/kafka/Deserializer.scala | 52 +++++++++---------- .../src/main/scala/fs2/kafka/package.scala | 5 ++ 2 files changed, 31 insertions(+), 26 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index ba397143a..2a68cebcf 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -16,7 +16,7 @@ import java.util.UUID * Functional composable Kafka key- and record deserializer with * support for effect types. */ -sealed abstract class Deserializer[F[_], A] { +sealed abstract class GenDeserializer[+T <: SerdeType, F[_], A] { /** * Attempts to deserialize the specified bytes into a value of @@ -29,27 +29,27 @@ sealed abstract class Deserializer[F[_], A] { * Creates a new [[Deserializer]] which applies the specified * function to the result of this [[Deserializer]]. */ - def map[B](f: A => B): Deserializer[F, B] + def map[B](f: A => B): GenDeserializer[T, F, B] /** * Creates a new [[Deserializer]] by first deserializing * with this [[Deserializer]] and then using the result * as input to the specified function. */ - def flatMap[B](f: A => Deserializer[F, B]): Deserializer[F, B] + def flatMap[T0 >: T <: SerdeType, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] /** * Creates a new [[Deserializer]] which deserializes both using * this [[Deserializer]] and that [[Deserializer]], and returns * both results in a tuple. */ - def product[B](that: Deserializer[F, B]): Deserializer[F, (A, B)] + def product[T0 >: T <: SerdeType, B](that: GenDeserializer[T0, F, B]): GenDeserializer[T0, F, (A, B)] /** * Creates a new [[Deserializer]] which handles errors by * turning them into `Either` values. */ - def attempt: Deserializer[F, Either[Throwable, A]] + def attempt: GenDeserializer[T, F, Either[Throwable, A]] /** * Creates a new [[Deserializer]] which returns `None` when the @@ -65,7 +65,7 @@ sealed abstract class Deserializer[F[_], A] { def suspend: Deserializer[F, A] } -object Deserializer { +object GenDeserializer { def apply[F[_], A](implicit deserializer: Deserializer[F, A]): Deserializer[F, A] = deserializer /** Alias for [[Deserializer#identity]]. 
*/ @@ -137,14 +137,14 @@ object Deserializer { deserialize(topic, headers, bytes).map(f) } - override def flatMap[B](f: A => Deserializer[F, B]): Deserializer[F, B] = + override def flatMap[T0 >: SerdeType.KeyOrValue <: SerdeType, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] = Deserializer.instance { (topic, headers, bytes) => deserialize(topic, headers, bytes).flatMap { a => f(a).deserialize(topic, headers, bytes) } } - override def product[B](that: Deserializer[F, B]): Deserializer[F, (A, B)] = + override def product[T0 >: SerdeType.KeyOrValue <: SerdeType, B](that: GenDeserializer[T0, F, B]): Deserializer[F, (A, B)] = Deserializer.instance { (topic, headers, bytes) => val a = deserialize(topic, headers, bytes) val b = that.deserialize(topic, headers, bytes) @@ -191,9 +191,9 @@ object Deserializer { * [[Deserializer]]s depending on the Kafka topic name * from which the serialized bytes came. */ - def topic[F[_], A]( - f: PartialFunction[String, Deserializer[F, A]] - )(implicit F: Sync[F]): Deserializer[F, A] = + def topic[T >: SerdeType.KeyOrValue <: SerdeType, F[_], A]( + f: PartialFunction[String, GenDeserializer[T, F, A]] + )(implicit F: Sync[F]): GenDeserializer[T, F, A] = Deserializer.instance { (topic, headers, bytes) => f.applyOrElse(topic, unexpectedTopic) .deserialize(topic, headers, bytes) @@ -232,42 +232,42 @@ object Deserializer { ): Deserializer[F, Option[A]] = deserializer.option - implicit def monadError[F[_]](implicit F: Sync[F]): MonadError[Deserializer[F, *], Throwable] = - new MonadError[Deserializer[F, *], Throwable] { - override def pure[A](a: A): Deserializer[F, A] = + implicit def monadError[T >: SerdeType.KeyOrValue <: SerdeType, F[_]](implicit F: Sync[F]): MonadError[GenDeserializer[T, F, *], Throwable] = + new MonadError[GenDeserializer[T, F, *], Throwable] { + override def pure[A](a: A): GenDeserializer[T, F, A] = Deserializer.const(a) override def map[A, B]( - deserializer: Deserializer[F, A] - )(f: A => B): Deserializer[F, B] = + deserializer: GenDeserializer[T, F, A] + )(f: A => B): GenDeserializer[T, F, B] = deserializer.map(f) override def flatMap[A, B]( - deserializer: Deserializer[F, A] - )(f: A => Deserializer[F, B]): Deserializer[F, B] = + deserializer: GenDeserializer[T, F, A] + )(f: A => GenDeserializer[T, F, B]): GenDeserializer[T, F, B] = deserializer.flatMap(f) override def product[A, B]( - first: Deserializer[F, A], - second: Deserializer[F, B] - ): Deserializer[F, (A, B)] = + first: GenDeserializer[T, F, A], + second: GenDeserializer[T, F, B] + ): GenDeserializer[T, F, (A, B)] = first.product(second) - override def tailRecM[A, B](a: A)(f: A => Deserializer[F, Either[A, B]]): Deserializer[F, B] = + override def tailRecM[A, B](a: A)(f: A => GenDeserializer[T, F, Either[A, B]]): GenDeserializer[T, F, B] = Deserializer.instance { (topic, headers, bytes) => F.tailRecM(a)(f(_).deserialize(topic, headers, bytes)) } - override def handleErrorWith[A](fa: Deserializer[F, A])( - f: Throwable => Deserializer[F, A] - ): Deserializer[F, A] = + override def handleErrorWith[A](fa: GenDeserializer[T, F, A])( + f: Throwable => GenDeserializer[T, F, A] + ): GenDeserializer[T, F, A] = Deserializer.instance { (topic, headers, bytes) => F.handleErrorWith(fa.deserialize(topic, headers, bytes)) { throwable => f(throwable).deserialize(topic, headers, bytes) } } - override def raiseError[A](e: Throwable): Deserializer[F, A] = + override def raiseError[A](e: Throwable): GenDeserializer[T, F, A] = Deserializer.fail(e) } diff --git 
a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 5ce57b0e4..de7261a16 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -63,4 +63,9 @@ package object kafka { type KeySerializer[F[_], A] = GenSerializer[SerdeType.Key, F, A] type ValueSerializer[F[_], A] = GenSerializer[SerdeType.Value, F, A] val Serializer: GenSerializer.type = GenSerializer + + type Deserializer[F[_], A] = GenDeserializer[SerdeType.KeyOrValue, F, A] + type KeyDeserializer[F[_], A] = GenDeserializer[SerdeType.Key, F, A] + type ValueDeserializer[F[_], A] = GenDeserializer[SerdeType.Value, F, A] + val Deserializer: GenDeserializer.type = GenDeserializer } From c817d288d6869cdb152d243d3c9d40cdd42d0a57 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:37:41 +0000 Subject: [PATCH 045/162] rework KeyOrValue --- .../main/scala/fs2/kafka/Deserializer.scala | 24 ++++++++++++------- .../src/main/scala/fs2/kafka/Serializer.scala | 15 ++++-------- .../src/main/scala/fs2/kafka/package.scala | 19 ++++++++++----- 3 files changed, 33 insertions(+), 25 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index 2a68cebcf..adcde9f2b 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -16,7 +16,7 @@ import java.util.UUID * Functional composable Kafka key- and record deserializer with * support for effect types. */ -sealed abstract class GenDeserializer[+T <: SerdeType, F[_], A] { +sealed abstract class GenDeserializer[-T <: KeyOrValue, F[_], A] { /** * Attempts to deserialize the specified bytes into a value of @@ -36,14 +36,14 @@ sealed abstract class GenDeserializer[+T <: SerdeType, F[_], A] { * with this [[Deserializer]] and then using the result * as input to the specified function. */ - def flatMap[T0 >: T <: SerdeType, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] + def flatMap[T0 <: T, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] /** * Creates a new [[Deserializer]] which deserializes both using * this [[Deserializer]] and that [[Deserializer]], and returns * both results in a tuple. 
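The combinators on the deserializer type compose in the usual way: `map` transforms the decoded value and `product` pairs two deserializers that read the same bytes. A small sketch, assuming the implicit `String` instance and the byte-array `identity` deserializer from the companion object:

```scala
import cats.effect.IO
import fs2.kafka._

// Decode the payload as a String and, alongside it, expose its size in bytes.
val stringAndLength: Deserializer[IO, (String, Int)] =
  Deserializer[IO, String].product(Deserializer.identity[IO].map(_.length))
```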
*/ - def product[T0 >: T <: SerdeType, B](that: GenDeserializer[T0, F, B]): GenDeserializer[T0, F, (A, B)] + def product[T0 <: T, B](that: GenDeserializer[T0, F, B]): GenDeserializer[T0, F, (A, B)] /** * Creates a new [[Deserializer]] which handles errors by @@ -137,14 +137,18 @@ object GenDeserializer { deserialize(topic, headers, bytes).map(f) } - override def flatMap[T0 >: SerdeType.KeyOrValue <: SerdeType, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] = + override def flatMap[T0 <: KeyOrValue, B]( + f: A => GenDeserializer[T0, F, B] + ): GenDeserializer[T0, F, B] = Deserializer.instance { (topic, headers, bytes) => deserialize(topic, headers, bytes).flatMap { a => f(a).deserialize(topic, headers, bytes) } } - override def product[T0 >: SerdeType.KeyOrValue <: SerdeType, B](that: GenDeserializer[T0, F, B]): Deserializer[F, (A, B)] = + override def product[T0 <: KeyOrValue, B]( + that: GenDeserializer[T0, F, B] + ): Deserializer[F, (A, B)] = Deserializer.instance { (topic, headers, bytes) => val a = deserialize(topic, headers, bytes) val b = that.deserialize(topic, headers, bytes) @@ -191,7 +195,7 @@ object GenDeserializer { * [[Deserializer]]s depending on the Kafka topic name * from which the serialized bytes came. */ - def topic[T >: SerdeType.KeyOrValue <: SerdeType, F[_], A]( + def topic[T <: KeyOrValue, F[_], A]( f: PartialFunction[String, GenDeserializer[T, F, A]] )(implicit F: Sync[F]): GenDeserializer[T, F, A] = Deserializer.instance { (topic, headers, bytes) => @@ -232,7 +236,9 @@ object GenDeserializer { ): Deserializer[F, Option[A]] = deserializer.option - implicit def monadError[T >: SerdeType.KeyOrValue <: SerdeType, F[_]](implicit F: Sync[F]): MonadError[GenDeserializer[T, F, *], Throwable] = + implicit def monadError[T <: KeyOrValue, F[_]]( + implicit F: Sync[F] + ): MonadError[GenDeserializer[T, F, *], Throwable] = new MonadError[GenDeserializer[T, F, *], Throwable] { override def pure[A](a: A): GenDeserializer[T, F, A] = Deserializer.const(a) @@ -253,7 +259,9 @@ object GenDeserializer { ): GenDeserializer[T, F, (A, B)] = first.product(second) - override def tailRecM[A, B](a: A)(f: A => GenDeserializer[T, F, Either[A, B]]): GenDeserializer[T, F, B] = + override def tailRecM[A, B]( + a: A + )(f: A => GenDeserializer[T, F, Either[A, B]]): GenDeserializer[T, F, B] = Deserializer.instance { (topic, headers, bytes) => F.tailRecM(a)(f(_).deserialize(topic, headers, bytes)) } diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 0ca951724..3c519e18e 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -12,14 +12,7 @@ import cats.syntax.all._ import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID -sealed trait SerdeType -object SerdeType { - sealed trait Key extends SerdeType - sealed trait Value extends SerdeType - sealed trait KeyOrValue extends Key with Value -} - -sealed abstract class GenSerializer[+T <: SerdeType, F[_], A] { +sealed abstract class GenSerializer[-T <: KeyOrValue, F[_], A] { /** * Attempts to serialize the specified value of type `A` into * bytes. The Kafka topic name, to which the serialized bytes @@ -180,7 +173,7 @@ object GenSerializer { * [[Serializer]]s depending on the Kafka topic name to * which the bytes are going to be sent. 
*/ - def topic[T >: SerdeType.KeyOrValue <: SerdeType, F[_], A]( + def topic[T <: KeyOrValue, F[_], A]( f: PartialFunction[String, GenSerializer[T, F, A]] )(implicit F: Sync[F]): GenSerializer[T, F, A] = Serializer.instance[F, A] { (topic, headers, a) => @@ -215,12 +208,12 @@ object GenSerializer { * The option [[Serializer]] serializes `None` as `null`, and * serializes `Some` values using the serializer for type `A`. */ - implicit def option[T <: SerdeType, F[_], A]( + implicit def option[T <: KeyOrValue, F[_], A]( implicit serializer: GenSerializer[T, F, A] ): GenSerializer[T, F, Option[A]] = serializer.option - implicit def contravariant[T <: SerdeType, F[_]]: Contravariant[GenSerializer[T, F, *]] = + implicit def contravariant[T <: KeyOrValue, F[_]]: Contravariant[GenSerializer[T, F, *]] = new Contravariant[GenSerializer[T, F, *]] { override def contramap[A, B](serializer: GenSerializer[T, F, A])(f: B => A): GenSerializer[T, F, B] = serializer.contramap(f) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index de7261a16..1887548b9 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -59,13 +59,20 @@ package object kafka { ): Pipe[F, CommittableOffset[F], Unit] = _.groupWithin(n, d).evalMap(CommittableOffsetBatch.fromFoldable(_).commit) - type Serializer[F[_], A] = GenSerializer[SerdeType.KeyOrValue, F, A] - type KeySerializer[F[_], A] = GenSerializer[SerdeType.Key, F, A] - type ValueSerializer[F[_], A] = GenSerializer[SerdeType.Value, F, A] + type Serializer[F[_], A] = GenSerializer[KeyOrValue, F, A] + type KeySerializer[F[_], A] = GenSerializer[Key, F, A] + type ValueSerializer[F[_], A] = GenSerializer[Value, F, A] val Serializer: GenSerializer.type = GenSerializer - type Deserializer[F[_], A] = GenDeserializer[SerdeType.KeyOrValue, F, A] - type KeyDeserializer[F[_], A] = GenDeserializer[SerdeType.Key, F, A] - type ValueDeserializer[F[_], A] = GenDeserializer[SerdeType.Value, F, A] + type Deserializer[F[_], A] = GenDeserializer[KeyOrValue, F, A] + type KeyDeserializer[F[_], A] = GenDeserializer[Key, F, A] + type ValueDeserializer[F[_], A] = GenDeserializer[Value, F, A] val Deserializer: GenDeserializer.type = GenDeserializer } + +package kafka { + sealed trait KeyOrValue + sealed trait Key extends KeyOrValue + sealed trait Value extends KeyOrValue + +} From c6567df866065309924c8e2ed1fe87f3da7d58dc Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:47:57 +0000 Subject: [PATCH 046/162] Stronger types in RecordDeserializer --- .../main/scala/fs2/kafka/ConsumerRecord.scala | 8 ++++---- .../scala/fs2/kafka/ConsumerSettings.scala | 20 +++++++++---------- .../scala/fs2/kafka/RecordDeserializer.scala | 20 +++++++++---------- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala index 5de7bba55..5353f498c 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala @@ -167,8 +167,8 @@ object ConsumerRecord { private[this] def deserializeFromBytes[F[_], K, V]( record: KafkaByteConsumerRecord, headers: Headers, - keyDeserializer: Deserializer[F, K], - valueDeserializer: Deserializer[F, V] + keyDeserializer: KeyDeserializer[F, K], + valueDeserializer: ValueDeserializer[F, V] )(implicit F: Apply[F]): F[(K, V)] = { val key = 
keyDeserializer.deserialize(record.topic, headers, record.key) val value = valueDeserializer.deserialize(record.topic, headers, record.value) @@ -177,8 +177,8 @@ object ConsumerRecord { private[kafka] def fromJava[F[_], K, V]( record: KafkaByteConsumerRecord, - keyDeserializer: Deserializer[F, K], - valueDeserializer: Deserializer[F, V] + keyDeserializer: KeyDeserializer[F, K], + valueDeserializer: ValueDeserializer[F, V] )(implicit F: Apply[F]): F[ConsumerRecord[K, V]] = { val headers = record.headers.asScala deserializeFromBytes(record, headers, keyDeserializer, valueDeserializer).map { diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 6e1582f83..6c7b52552 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -40,12 +40,12 @@ sealed abstract class ConsumerSettings[F[_], K, V] { /** * The `Deserializer` to use for deserializing record keys. */ - def keyDeserializer: F[Deserializer[F, K]] + def keyDeserializer: F[KeyDeserializer[F, K]] /** * The `Deserializer` to use for deserializing record values. */ - def valueDeserializer: F[Deserializer[F, V]] + def valueDeserializer: F[ValueDeserializer[F, V]] /** * A custom `ExecutionContext` to use for blocking Kafka operations. If not @@ -395,8 +395,8 @@ sealed abstract class ConsumerSettings[F[_], K, V] { object ConsumerSettings { private[this] final case class ConsumerSettingsImpl[F[_], K, V]( - override val keyDeserializer: F[Deserializer[F, K]], - override val valueDeserializer: F[Deserializer[F, V]], + override val keyDeserializer: F[KeyDeserializer[F, K]], + override val valueDeserializer: F[ValueDeserializer[F, V]], override val customBlockingContext: Option[ExecutionContext], override val properties: Map[String, String], override val closeTimeout: FiniteDuration, @@ -530,8 +530,8 @@ object ConsumerSettings { } private[this] def create[F[_], K, V]( - keyDeserializer: F[Deserializer[F, K]], - valueDeserializer: F[Deserializer[F, V]] + keyDeserializer: F[KeyDeserializer[F, K]], + valueDeserializer: F[ValueDeserializer[F, V]] ): ConsumerSettings[F, K, V] = ConsumerSettingsImpl( customBlockingContext = None, @@ -551,8 +551,8 @@ object ConsumerSettings { ) def apply[F[_], K, V]( - keyDeserializer: Deserializer[F, K], - valueDeserializer: Deserializer[F, V] + keyDeserializer: KeyDeserializer[F, K], + valueDeserializer: ValueDeserializer[F, V] )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = create( keyDeserializer = F.pure(keyDeserializer), @@ -561,7 +561,7 @@ object ConsumerSettings { def apply[F[_], K, V]( keyDeserializer: RecordDeserializer[F, K], - valueDeserializer: Deserializer[F, V] + valueDeserializer: ValueDeserializer[F, V] )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = create( keyDeserializer = keyDeserializer.forKey, @@ -569,7 +569,7 @@ object ConsumerSettings { ) def apply[F[_], K, V]( - keyDeserializer: Deserializer[F, K], + keyDeserializer: KeyDeserializer[F, K], valueDeserializer: RecordDeserializer[F, V] )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = create( diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala index cce06abeb..4de94a483 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala @@ -15,9 +15,9 @@ import cats.{Applicative, Functor} 
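Under the narrowed `ConsumerSettings` signatures, existing call sites that pass plain `Deserializer` instances keep compiling, since `Deserializer` covers both the key and value cases. A sketch with placeholder connection values:

```scala
import cats.effect.IO
import fs2.kafka._

// Plain Deserializer instances satisfy both the KeyDeserializer and
// ValueDeserializer parameters; bootstrap servers and group id are placeholders.
val consumerSettings: ConsumerSettings[IO, String, String] =
  ConsumerSettings(
    keyDeserializer = Deserializer[IO, String],
    valueDeserializer = Deserializer[IO, String]
  ).withBootstrapServers("localhost:9092")
    .withGroupId("example-group")
```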
* a creation effect. */ sealed abstract class RecordDeserializer[F[_], A] { - def forKey: F[Deserializer[F, A]] + def forKey: F[KeyDeserializer[F, A]] - def forValue: F[Deserializer[F, A]] + def forValue: F[ValueDeserializer[F, A]] /** * Returns a new [[RecordDeserializer]] instance that will catch deserialization @@ -25,7 +25,7 @@ sealed abstract class RecordDeserializer[F[_], A] { * causing the consumer to fail. */ final def attempt(implicit F: Functor[F]): RecordDeserializer[F, Either[Throwable, A]] = - RecordDeserializer.instance(forKey.map(_.attempt), forValue.map(_.attempt)) + RecordDeserializer.instance(forKey.map((_: KeyDeserializer[F, A]).attempt), forValue.map(_.attempt)) } object RecordDeserializer { @@ -34,26 +34,26 @@ object RecordDeserializer { ): RecordDeserializer[F, A] = deserializer - def const[F[_], A]( + def const[F[_]: Functor, A]( deserializer: => F[Deserializer[F, A]] ): RecordDeserializer[F, A] = RecordDeserializer.instance( - forKey = deserializer, - forValue = deserializer + forKey = deserializer.widen, + forValue = deserializer.widen ) def instance[F[_], A]( - forKey: => F[Deserializer[F, A]], - forValue: => F[Deserializer[F, A]] + forKey: => F[KeyDeserializer[F, A]], + forValue: => F[ValueDeserializer[F, A]] ): RecordDeserializer[F, A] = { def _forKey = forKey def _forValue = forValue new RecordDeserializer[F, A] { - override def forKey: F[Deserializer[F, A]] = + override def forKey: F[KeyDeserializer[F, A]] = _forKey - override def forValue: F[Deserializer[F, A]] = + override def forValue: F[ValueDeserializer[F, A]] = _forValue override def toString: String = From 99f0d2896864d3831a62e7132ab8a414235eaa76 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:51:04 +0000 Subject: [PATCH 047/162] Add scaladoc for KeyOrValue --- modules/core/src/main/scala/fs2/kafka/package.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 1887548b9..eaabf5f13 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -71,8 +71,10 @@ package object kafka { } package kafka { + + /** Phantom types to indicate whether a [[Serializer]]/[[Deserializer]] if for keys, values, or both + */ sealed trait KeyOrValue sealed trait Key extends KeyOrValue sealed trait Value extends KeyOrValue - } From 441bf1c5fcb3cbbb66253258cda8ca925ed66c86 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:51:31 +0000 Subject: [PATCH 048/162] Formatting --- .../core/src/main/scala/fs2/kafka/RecordDeserializer.scala | 2 +- modules/core/src/main/scala/fs2/kafka/Serializer.scala | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala index 4de94a483..652194947 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala @@ -25,7 +25,7 @@ sealed abstract class RecordDeserializer[F[_], A] { * causing the consumer to fail. 
*/ final def attempt(implicit F: Functor[F]): RecordDeserializer[F, Either[Throwable, A]] = - RecordDeserializer.instance(forKey.map((_: KeyDeserializer[F, A]).attempt), forValue.map(_.attempt)) + RecordDeserializer.instance(forKey.map(_.attempt), forValue.map(_.attempt)) } object RecordDeserializer { diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 3c519e18e..61a23bdc1 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -13,6 +13,7 @@ import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID sealed abstract class GenSerializer[-T <: KeyOrValue, F[_], A] { + /** * Attempts to serialize the specified value of type `A` into * bytes. The Kafka topic name, to which the serialized bytes @@ -215,7 +216,9 @@ object GenSerializer { implicit def contravariant[T <: KeyOrValue, F[_]]: Contravariant[GenSerializer[T, F, *]] = new Contravariant[GenSerializer[T, F, *]] { - override def contramap[A, B](serializer: GenSerializer[T, F, A])(f: B => A): GenSerializer[T, F, B] = + override def contramap[A, B]( + serializer: GenSerializer[T, F, A] + )(f: B => A): GenSerializer[T, F, B] = serializer.contramap(f) } From d47d3313baaba390017224f30d0157146b80f891 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 12 Mar 2022 22:56:58 +0000 Subject: [PATCH 049/162] Fix deserializer types --- .../main/scala/fs2/kafka/internal/KafkaConsumerActor.scala | 4 ++-- .../src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index 0ed372787..dea97dd90 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -49,8 +49,8 @@ import scala.collection.immutable.SortedSet */ private[kafka] final class KafkaConsumerActor[F[_], K, V]( settings: ConsumerSettings[F, K, V], - keyDeserializer: Deserializer[F, K], - valueDeserializer: Deserializer[F, V], + keyDeserializer: KeyDeserializer[F, K], + valueDeserializer: ValueDeserializer[F, V], ref: Ref[F, State[F, K, V]], requests: Queue[F, Request[F, K, V]], withConsumer: WithConsumer[F] diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index c00918248..6405c077e 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -46,8 +46,8 @@ final class AvroDeserializer[A] private[vulcan] ( } RecordDeserializer.instance( - forKey = createDeserializer(true), - forValue = createDeserializer(false) + forKey = createDeserializer(true).widen, + forValue = createDeserializer(false).widen ) case Left(error) => From cc0a92c6bafe780356a554848ae4b8f441d2746d Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 11:39:41 +0000 Subject: [PATCH 050/162] Fix producers.md --- docs/src/main/mdoc/producers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/main/mdoc/producers.md b/docs/src/main/mdoc/producers.md index 798cfe404..1de1d6a0e 100644 --- a/docs/src/main/mdoc/producers.md +++ b/docs/src/main/mdoc/producers.md @@ -48,7 +48,7 @@ Serializer.lift[IO, String](s => 
IO.pure(s.getBytes("UTF-8"))) To support different serializers for different topics, use `topic` to pattern match on the topic name. ```scala mdoc:silent -Serializer.topic[IO, Int] { +Serializer.topic[KeyOrValue, IO, Int] { case "first" => Serializer[IO, String].contramap(_.show) case "second" => Serializer[IO, Int] } From f7b1eef54dcacd9bc5dfd53bdb93c58a1ff60106 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 11:40:21 +0000 Subject: [PATCH 051/162] Add entries to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index f7f4fa41e..8fc2c65b6 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ target/ .metals/ .vscode/ +.bloop/ +metals.sbt \ No newline at end of file From ef0f6b739ca95de5aaa5d7499819f96a7aab0f2b Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 12:26:18 +0000 Subject: [PATCH 052/162] Fix consumers.md --- docs/src/main/mdoc/consumers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/main/mdoc/consumers.md b/docs/src/main/mdoc/consumers.md index a131a167a..167745b68 100644 --- a/docs/src/main/mdoc/consumers.md +++ b/docs/src/main/mdoc/consumers.md @@ -48,7 +48,7 @@ Deserializer.lift(bytes => IO.pure(bytes.dropWhile(_ == 0))) To support different deserializers for different topics, use `topic` to pattern match on the topic name. ```scala mdoc:silent -Deserializer.topic[IO, String] { +Deserializer.topic[KeyOrValue, IO, String] { case "first" => Deserializer[IO, String] case "second" => Deserializer[IO, Int].map(_.show) } From c00970303ce18d3b09c9abeea559ae6ca798f452 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 14:36:38 +0000 Subject: [PATCH 053/162] Remove passthrough from ProducerRecords --- docs/src/main/mdoc/producers.md | 8 +-- docs/src/main/mdoc/quick-example.md | 6 +- .../main/scala/fs2/kafka/KafkaProducer.scala | 54 ++++++++++------ .../scala/fs2/kafka/ProducerRecords.scala | 63 +++++-------------- .../main/scala/fs2/kafka/ProducerResult.scala | 36 +++++------ .../kafka/TransactionalKafkaProducer.scala | 22 +++---- .../kafka/TransactionalProducerRecords.scala | 57 +++++------------ .../scala/fs2/kafka/KafkaProducerSpec.scala | 39 ++++++------ .../scala/fs2/kafka/ProducerRecordsSpec.scala | 15 ----- .../scala/fs2/kafka/ProducerResultSpec.scala | 12 ++-- .../TransactionalKafkaProducerSpec.scala | 37 ++++++----- .../TransactionalProducerRecordsSpec.scala | 7 --- .../rules/src/main/scala/fix/Fs2Kafka.scala | 2 +- 13 files changed, 145 insertions(+), 213 deletions(-) diff --git a/docs/src/main/mdoc/producers.md b/docs/src/main/mdoc/producers.md index 798cfe404..359ca225d 100644 --- a/docs/src/main/mdoc/producers.md +++ b/docs/src/main/mdoc/producers.md @@ -174,7 +174,7 @@ object ProduceExample extends IOApp { val key = committable.record.key val value = committable.record.value val record = ProducerRecord("topic", key, value) - ProducerRecords.one(record, committable.offset) + ProducerRecords.one(record) } .through(KafkaProducer.pipe(producerSettings)) @@ -204,7 +204,7 @@ object PartitionedProduceExample extends IOApp { val key = committable.record.key val value = committable.record.value val record = ProducerRecord("topic", key, value) - ProducerRecords.one(record, committable.offset) + ProducerRecords.one(record) } .through(KafkaProducer.pipe(producerSettings, producer)) } @@ -231,7 +231,7 @@ object KafkaProducerProduceExample extends IOApp { val key = committable.record.key val value = committable.record.value val record = 
ProducerRecord("topic", key, value) - ProducerRecords.one(record, committable.offset) + ProducerRecords.one(record) } .evalMap(producer.produce) .groupWithin(500, 15.seconds) @@ -261,7 +261,7 @@ object KafkaProducerProduceFlattenExample extends IOApp { val key = committable.record.key val value = committable.record.value val record = ProducerRecord("topic", key, value) - ProducerRecords.one(record, committable.offset) + ProducerRecords.one(record) } .evalMap { record => producer.produce(record).flatten diff --git a/docs/src/main/mdoc/quick-example.md b/docs/src/main/mdoc/quick-example.md index b2a0f5110..a02fe7e94 100644 --- a/docs/src/main/mdoc/quick-example.md +++ b/docs/src/main/mdoc/quick-example.md @@ -37,11 +37,11 @@ object Main extends IOApp { processRecord(committable.record) .map { case (key, value) => val record = ProducerRecord("topic", key, value) - ProducerRecords.one(record, committable.offset) + committable.offset -> ProducerRecords.one(record) } } - .through(KafkaProducer.pipe(producerSettings)) - .map(_.passthrough) + .through(KafkaProducer.pipeWithPassthrough(producerSettings)) + .map(_._1) .through(commitBatchWithin(500, 15.seconds)) stream.compile.drain.as(ExitCode.Success) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index c1a9d7fad..f3d50a8da 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -54,9 +54,9 @@ abstract class KafkaProducer[F[_], K, V] { * have `otherAction` execute after records have been sent, * but losing the order of produced records. */ - def produce[P]( - records: ProducerRecords[P, K, V] - ): F[F[ProducerResult[P, K, V]]] + def produce( + records: ProducerRecords[K, V] + ): F[F[ProducerResult[K, V]]] } object KafkaProducer { @@ -69,7 +69,7 @@ object KafkaProducer { * see [[KafkaProducer.produce]] for general semantics. */ def produceOne_(record: ProducerRecord[K, V])(implicit F: Functor[F]): F[F[RecordMetadata]] = - produceOne(record, ()).map(_.map { res => + produceOne(record).map(_.map { res => res.records.head.get._2 //Should always be present so get is ok }) @@ -85,7 +85,7 @@ object KafkaProducer { * see [[KafkaProducer.produce]] for general semantics. */ def produce_( - records: ProducerRecords[_, K, V] + records: ProducerRecords[K, V] )(implicit F: Functor[F]): F[F[Chunk[(ProducerRecord[K, V], RecordMetadata)]]] = producer.produce(records).map(_.map(_.records)) @@ -93,19 +93,18 @@ object KafkaProducer { * Produce a single record to the specified topic using the provided key and value, * see [[KafkaProducer.produce]] for general semantics. */ - def produceOne[P]( + def produceOne( topic: String, key: K, - value: V, - passthrough: P - ): F[F[ProducerResult[P, K, V]]] = - produceOne(ProducerRecord(topic, key, value), passthrough) + value: V + ): F[F[ProducerResult[K, V]]] = + produceOne(ProducerRecord(topic, key, value)) /** * Produce a single [[ProducerRecord]], see [[KafkaProducer.produce]] for general semantics. 
*/ - def produceOne[P](record: ProducerRecord[K, V], passthrough: P): F[F[ProducerResult[P, K, V]]] = - producer.produce(ProducerRecords.one(record, passthrough)) + def produceOne(record: ProducerRecord[K, V]): F[F[ProducerResult[K, V]]] = + producer.produce(ProducerRecords.one(record)) } @@ -145,13 +144,13 @@ object KafkaProducer { valueSerializer: Serializer[F, V] ): KafkaProducer.Metrics[F, K, V] = new KafkaProducer.Metrics[F, K, V] { - override def produce[P]( - records: ProducerRecords[P, K, V] - ): F[F[ProducerResult[P, K, V]]] = + override def produce( + records: ProducerRecords[K, V] + ): F[F[ProducerResult[K, V]]] = withProducer { (producer, blocking) => records.records .traverse(produceRecord(keySerializer, valueSerializer, producer, blocking)) - .map(_.sequence.map(ProducerResult(_, records.passthrough))) + .map(_.sequence.map(ProducerResult(_))) } override def metrics: F[Map[MetricName, Metric]] = @@ -205,25 +204,40 @@ object KafkaProducer { * produces record in batches, limiting the number of records * in the same batch using [[ProducerSettings#parallelism]]. */ - def pipe[F[_], K, V, P]( + def pipe[F[_], K, V]( settings: ProducerSettings[F, K, V] )( implicit F: Async[F], mk: MkProducer[F] - ): Pipe[F, ProducerRecords[P, K, V], ProducerResult[P, K, V]] = + ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = records => stream(settings).flatMap(pipe(settings, _).apply(records)) + def pipeWithPassthrough[F[_], K, V, P]( + settings: ProducerSettings[F, K, V] + )( + implicit F: Async[F], + mk: MkProducer[F] + ): Pipe[F, (P, ProducerRecords[K, V]), (P, ProducerResult[K, V])] = + records => stream(settings).flatMap(pipeWithPassthrough(settings, _).apply(records)) + /** * Produces records in batches using the provided [[KafkaProducer]]. * The number of records in the same batch is limited using the * [[ProducerSettings#parallelism]] setting. */ - def pipe[F[_]: Concurrent, K, V, P]( + def pipe[F[_]: Concurrent, K, V]( settings: ProducerSettings[F, K, V], producer: KafkaProducer[F, K, V] - ): Pipe[F, ProducerRecords[P, K, V], ProducerResult[P, K, V]] = + ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = _.evalMap(producer.produce).mapAsync(settings.parallelism)(identity) + def pipeWithPassthrough[F[_]: Concurrent, K, V, P]( + settings: ProducerSettings[F, K, V], + producer: KafkaProducer[F, K, V] + ): Pipe[F, (P, ProducerRecords[K, V]), (P, ProducerResult[K, V])] = + _.evalMap { case (p, records) => producer.produce(records).map(_.tupleLeft(p)) } + .mapAsync(settings.parallelism)(identity) + private[this] def serializeToBytes[F[_], K, V]( keySerializer: Serializer[F, K], valueSerializer: Serializer[F, V], diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala index c24bec44a..09ea871a0 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala @@ -29,48 +29,31 @@ import scala.collection.mutable * The [[passthrough]] and [[records]] can be retrieved from an * existing [[ProducerRecords]] instance.
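Since `ProducerRecords` no longer carries a passthrough value, anything that has to survive the produce step, such as a committable offset, travels next to the records in a tuple and comes back out of `pipeWithPassthrough`. A rough sketch mirroring the quick-example change, with topic name and batching values as placeholders:

```scala
import scala.concurrent.duration._
import cats.effect.IO
import fs2.Pipe
import fs2.kafka._

// Pair each committable offset with its ProducerRecords, produce, then commit
// the offsets that come back out of the producer pipe.
def produceAndCommit(
  producerSettings: ProducerSettings[IO, String, String]
): Pipe[IO, CommittableConsumerRecord[IO, String, String], Unit] =
  _.map { committable =>
    val record =
      ProducerRecord("example-topic", committable.record.key, committable.record.value)
    committable.offset -> ProducerRecords.one(record)
  }.through(KafkaProducer.pipeWithPassthrough(producerSettings))
    .map(_._1)
    .through(commitBatchWithin(500, 15.seconds))
```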
*/ -sealed abstract class ProducerRecords[+P, +K, +V] { +sealed abstract class ProducerRecords[+K, +V] { /** The records to produce. Can be empty for passthrough-only. */ def records: Chunk[ProducerRecord[K, V]] - - /** The passthrough to emit once all [[records]] have been produced. */ - def passthrough: P } object ProducerRecords { - private[this] final class ProducerRecordsImpl[+P, +K, +V]( - override val records: Chunk[ProducerRecord[K, V]], - override val passthrough: P - ) extends ProducerRecords[P, K, V] { + private[this] final class ProducerRecordsImpl[+K, +V]( + override val records: Chunk[ProducerRecord[K, V]] + ) extends ProducerRecords[K, V] { override def toString: String = - if (records.isEmpty) s"ProducerRecords(, $passthrough)" - else records.mkString("ProducerRecords(", ", ", s", $passthrough)") + if (records.isEmpty) s"ProducerRecords()" + else records.mkString("ProducerRecords(", ", ", ")") } /** * Creates a new [[ProducerRecords]] for producing zero or more * `ProducerRecords`s, then emitting a [[ProducerResult]] with - * the results and `Unit` passthrough value. + * the results and specified passthrough value. */ def apply[F[+_], K, V]( records: F[ProducerRecord[K, V]] )( implicit F: Traverse[F] - ): ProducerRecords[Unit, K, V] = - apply(records, ()) - - /** - * Creates a new [[ProducerRecords]] for producing zero or more - * `ProducerRecords`s, then emitting a [[ProducerResult]] with - * the results and specified passthrough value. - */ - def apply[F[+_], P, K, V]( - records: F[ProducerRecord[K, V]], - passthrough: P - )( - implicit F: Traverse[F] - ): ProducerRecords[P, K, V] = { + ): ProducerRecords[K, V] = { val numRecords = F.size(records).toInt val chunk = if (numRecords <= 1) { F.get(records)(0) match { @@ -86,37 +69,25 @@ object ProducerRecords { } Chunk.array(buf.toArray) } - new ProducerRecordsImpl(chunk, passthrough) + new ProducerRecordsImpl(chunk) } /** * Creates a new [[ProducerRecords]] for producing exactly one * `ProducerRecord`, then emitting a [[ProducerResult]] with - * the result and `Unit` passthrough value. + * the result and specified passthrough value. */ def one[K, V]( record: ProducerRecord[K, V] - ): ProducerRecords[Unit, K, V] = - one(record, ()) - - /** - * Creates a new [[ProducerRecords]] for producing exactly one - * `ProducerRecord`, then emitting a [[ProducerResult]] with - * the result and specified passthrough value. 
- */ - def one[P, K, V]( - record: ProducerRecord[K, V], - passthrough: P - ): ProducerRecords[P, K, V] = - new ProducerRecordsImpl(Chunk.singleton(record), passthrough) + ): ProducerRecords[K, V] = + new ProducerRecordsImpl(Chunk.singleton(record)) - implicit def producerRecordsShow[P, K, V]( + implicit def producerRecordsShow[K, V]( implicit K: Show[K], - V: Show[V], - P: Show[P] - ): Show[ProducerRecords[P, K, V]] = Show.show { records => - if (records.records.isEmpty) show"ProducerRecords(, ${records.passthrough})" - else records.records.mkStringShow("ProducerRecords(", ", ", s", ${records.passthrough})") + V: Show[V] + ): Show[ProducerRecords[K, V]] = Show.show { records => + if (records.records.isEmpty) show"ProducerRecords(})" + else records.records.mkStringShow("ProducerRecords(", ", ", ")") } } diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala b/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala index 248a09224..88d645883 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala @@ -24,27 +24,23 @@ import org.apache.kafka.clients.producer.RecordMetadata *
* Use [[ProducerResult#apply]] to create a new [[ProducerResult]]. */ -sealed abstract class ProducerResult[+P, +K, +V] { +sealed abstract class ProducerResult[+K, +V] { /** * The records produced along with respective metadata. * Can be empty for passthrough-only. */ def records: Chunk[(ProducerRecord[K, V], RecordMetadata)] - - /** The passthrough value. */ - def passthrough: P } object ProducerResult { - private[this] final class ProducerResultImpl[+P, +K, +V]( - override val records: Chunk[(ProducerRecord[K, V], RecordMetadata)], - override val passthrough: P - ) extends ProducerResult[P, K, V] { + private[this] final class ProducerResultImpl[+K, +V]( + override val records: Chunk[(ProducerRecord[K, V], RecordMetadata)] + ) extends ProducerResult[K, V] { override def toString: String = if (records.isEmpty) - s"ProducerResult(, $passthrough)" + s"ProducerResult()" else records.mkStringAppend { case (append, (record, metadata)) => @@ -54,7 +50,7 @@ object ProducerResult { }( start = "ProducerResult(", sep = ", ", - end = s", $passthrough)" + end = s")" ) } @@ -63,20 +59,18 @@ object ProducerResult { * or more `ProducerRecord`s, finally emitting a passthrough * value and the `ProducerRecord`s with `RecordMetadata`. */ - def apply[P, K, V]( - records: Chunk[(ProducerRecord[K, V], RecordMetadata)], - passthrough: P - ): ProducerResult[P, K, V] = - new ProducerResultImpl(records, passthrough) + def apply[K, V]( + records: Chunk[(ProducerRecord[K, V], RecordMetadata)] + ): ProducerResult[K, V] = + new ProducerResultImpl(records) - implicit def producerResultShow[P, K, V]( + implicit def producerResultShow[K, V]( implicit K: Show[K], - V: Show[V], - P: Show[P] - ): Show[ProducerResult[P, K, V]] = Show.show { result => + V: Show[V] + ): Show[ProducerResult[K, V]] = Show.show { result => if (result.records.isEmpty) - show"ProducerResult(, ${result.passthrough})" + show"ProducerResult()" else result.records.mkStringAppend { case (append, (record, metadata)) => @@ -86,7 +80,7 @@ object ProducerResult { }( start = "ProducerResult(", sep = ", ", - end = show", ${result.passthrough})" + end = show")" ) } } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 189fe9040..9708a2fb9 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -38,8 +38,8 @@ abstract class TransactionalKafkaProducer[F[_], K, V] { * transaction completes successfully. */ def produce[P]( - records: TransactionalProducerRecords[F, P, K, V] - ): F[ProducerResult[P, K, V]] + records: TransactionalProducerRecords[F, K, V] + ): F[ProducerResult[K, V]] } object TransactionalKafkaProducer { @@ -71,7 +71,7 @@ object TransactionalKafkaProducer { * or cancellation occurs, the transaction is aborted. The returned effect succeeds * if the whole transaction completes successfully. 
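For the transactional producer the shape is the same: records and the offsets they came from are grouped into `CommittableProducerRecords`, and `produce` commits those offsets in the same transaction. A rough sketch, assuming a producer obtained elsewhere (for example from `TransactionalKafkaProducer.stream`) and a placeholder topic name:

```scala
import cats.effect.IO
import fs2.kafka._

// Produce one record and commit the offset it came from atomically.
def produceInTransaction(
  producer: TransactionalKafkaProducer[IO, String, String],
  committable: CommittableConsumerRecord[IO, String, String]
): IO[ProducerResult[String, String]] =
  producer.produce(
    TransactionalProducerRecords.one(
      CommittableProducerRecords.one(
        ProducerRecord("example-topic", committable.record.key, committable.record.value),
        committable.offset
      )
    )
  )
```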
*/ - def produceWithoutOffsets[P](records: ProducerRecords[P, K, V]): F[ProducerResult[P, K, V]] + def produceWithoutOffsets(records: ProducerRecords[K, V]): F[ProducerResult[K, V]] } /** @@ -97,13 +97,13 @@ object TransactionalKafkaProducer { ).mapN { (keySerializer, valueSerializer, withProducer) => new TransactionalKafkaProducer.WithoutOffsets[F, K, V] { override def produce[P]( - records: TransactionalProducerRecords[F, P, K, V] - ): F[ProducerResult[P, K, V]] = + records: TransactionalProducerRecords[F, K, V] + ): F[ProducerResult[K, V]] = produceTransactionWithOffsets(records) - .map(ProducerResult(_, records.passthrough)) + .map(ProducerResult(_)) private[this] def produceTransactionWithOffsets[P]( - records: TransactionalProducerRecords[F, P, K, V] + records: TransactionalProducerRecords[F, K, V] ): F[Chunk[(ProducerRecord[K, V], RecordMetadata)]] = if (records.records.isEmpty) F.pure(Chunk.empty) else { @@ -128,10 +128,10 @@ object TransactionalKafkaProducer { } } - override def produceWithoutOffsets[P]( - records: ProducerRecords[P, K, V] - ): F[ProducerResult[P, K, V]] = - produceTransaction(records.records, None).map(ProducerResult(_, records.passthrough)) + override def produceWithoutOffsets( + records: ProducerRecords[K, V] + ): F[ProducerResult[K, V]] = + produceTransaction(records.records, None).map(ProducerResult(_)) private[this] def produceTransaction[P]( records: Chunk[ProducerRecord[K, V]], diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala index f6db4af61..d14e4c3a7 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala @@ -27,23 +27,19 @@ import fs2.kafka.internal.syntax._ * commit exactly one offset, then emit a [[ProducerResult]] with the * results and specified passthrough value. */ -sealed abstract class TransactionalProducerRecords[F[_], +P, +K, +V] { +sealed abstract class TransactionalProducerRecords[F[_], +K, +V] { /** The records to produce and commit. Can be empty for passthrough-only. */ def records: Chunk[CommittableProducerRecords[F, K, V]] - - /** The passthrough to emit once all [[records]] have been produced and committed. */ - def passthrough: P } object TransactionalProducerRecords { - private[this] final class TransactionalProducerRecordsImpl[F[_], +P, +K, +V]( - override val records: Chunk[CommittableProducerRecords[F, K, V]], - override val passthrough: P - ) extends TransactionalProducerRecords[F, P, K, V] { + private[this] final class TransactionalProducerRecordsImpl[F[_], +K, +V]( + override val records: Chunk[CommittableProducerRecords[F, K, V]] + ) extends TransactionalProducerRecords[F, K, V] { override def toString: String = - if (records.isEmpty) s"TransactionalProducerRecords(, $passthrough)" - else records.mkString("TransactionalProducerRecords(", ", ", s", $passthrough)") + if (records.isEmpty) s"TransactionalProducerRecords()" + else records.mkString("TransactionalProducerRecords(", ", ", ")") } /** @@ -53,55 +49,32 @@ object TransactionalProducerRecords { */ def apply[F[_], K, V]( records: Chunk[CommittableProducerRecords[F, K, V]] - ): TransactionalProducerRecords[F, Unit, K, V] = - apply(records, ()) - - /** - * Creates a new [[TransactionalProducerRecords]] for producing zero or - * more [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the results and specified passthrough value. 
- */ - def apply[F[_], P, K, V]( - records: Chunk[CommittableProducerRecords[F, K, V]], - passthrough: P - ): TransactionalProducerRecords[F, P, K, V] = - new TransactionalProducerRecordsImpl(records, passthrough) + ): TransactionalProducerRecords[F, K, V] = + new TransactionalProducerRecordsImpl(records) /** * Creates a new [[TransactionalProducerRecords]] for producing exactly * one [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the result and `Unit` passthrough value. + * with the result and specified passthrough value. */ def one[F[_], K, V]( record: CommittableProducerRecords[F, K, V] - ): TransactionalProducerRecords[F, Unit, K, V] = - one(record, ()) - - /** - * Creates a new [[TransactionalProducerRecords]] for producing exactly - * one [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the result and specified passthrough value. - */ - def one[F[_], P, K, V]( - record: CommittableProducerRecords[F, K, V], - passthrough: P - ): TransactionalProducerRecords[F, P, K, V] = - apply(Chunk.singleton(record), passthrough) + ): TransactionalProducerRecords[F, K, V] = + apply(Chunk.singleton(record)) implicit def transactionalProducerRecordsShow[F[_], P, K, V]( implicit K: Show[K], - V: Show[V], - P: Show[P] - ): Show[TransactionalProducerRecords[F, P, K, V]] = + V: Show[V] + ): Show[TransactionalProducerRecords[F, K, V]] = Show.show { records => if (records.records.isEmpty) - show"TransactionalProducerRecords(, ${records.passthrough})" + show"TransactionalProducerRecords()" else records.records.mkStringShow( "TransactionalProducerRecords(", ", ", - show", ${records.passthrough})" + ")" ) } } diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 993351eb7..bd23c6312 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -1,5 +1,6 @@ package fs2.kafka +import cats.syntax.all._ import cats.effect.IO import cats.effect.unsafe.implicits.global import fs2.{Chunk, Stream} @@ -30,13 +31,13 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer.stream(producerSettings[IO]) _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) - records <- Stream.chunk(Chunk.seq(toProduce).map { + (records, passthrough) <- Stream.chunk(Chunk.seq(toProduce).map { case passthrough @ (key, value) => - ProducerRecords.one(ProducerRecord(topic, key, value), passthrough) + (ProducerRecords.one(ProducerRecord(topic, key, value)), passthrough) }) batched <- Stream .eval(producer.produce(records)) - .map(_.map(_.passthrough)) + .map(_.as(passthrough)) .buffer(toProduce.size) passthrough <- Stream.eval(batched) } yield passthrough).compile.toVector.unsafeRunSync() @@ -58,8 +59,8 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer[IO].stream(producerSettings[IO]) records <- Stream.chunk(Chunk.seq(toProduce).map { - case passthrough @ (key, value) => - ProducerRecords.one(ProducerRecord(topic, key, value), passthrough) + case (key, value) => + ProducerRecords.one(ProducerRecord(topic, key, value)) }) _ <- Stream.eval(producer.produce(records)) } yield ()).compile.toVector.unsafeRunSync() @@ -75,7 +76,6 @@ final class KafkaProducerSpec extends BaseKafkaSpec { withTopic { topic => createCustomTopic(topic, partitions = 3) val toProduce = (0 until 10).map(n => s"key-$n" -> s"value->$n").toList - val toPassthrough = 
"passthrough" val produced = (for { @@ -83,7 +83,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { records = ProducerRecords(toProduce.map { case (key, value) => ProducerRecord(topic, key, value) - }, toPassthrough) + }) result <- Stream.eval(producer.produce(records).flatten) } yield result).compile.lastOrError.unsafeRunSync() @@ -93,7 +93,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { record.key -> record.value }.toList - assert(records == toProduce && produced.passthrough == toPassthrough) + assert(records == toProduce) val consumed = consumeNumberKeyedMessagesFrom[String, String](topic, toProduce.size) @@ -110,11 +110,11 @@ final class KafkaProducerSpec extends BaseKafkaSpec { val result = (for { producer <- KafkaProducer.stream(producerSettings[IO]) - records = ProducerRecords(Nil, passthrough) - result <- Stream.eval(producer.produce(records).flatten) + records = ProducerRecords(Nil) + result <- Stream.eval(producer.produce(records).flatten.tupleRight(passthrough)) } yield result).compile.lastOrError.unsafeRunSync() - assert(result.passthrough == passthrough) + assert(result._2 == passthrough) } } @@ -127,11 +127,11 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer.stream(producerSettings[IO]) result <- Stream.eval { - producer.produce(ProducerRecords(Nil, passthrough)).flatten + producer.produce(ProducerRecords(Nil)).flatten.tupleRight(passthrough) } } yield result).compile.lastOrError.unsafeRunSync() - assert(result.passthrough == passthrough) + assert(result._2 == passthrough) } } @@ -189,12 +189,14 @@ final class KafkaProducerSpec extends BaseKafkaSpec { _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) batched <- Stream .eval( - producer.produceOne(ProducerRecord(topic, toProduce._1, toProduce._2), passthrough) + producer + .produceOne(ProducerRecord(topic, toProduce._1, toProduce._2)) + .map(_.tupleRight(passthrough)) ) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result.passthrough == passthrough) + assert(result._2 == passthrough) } } @@ -209,11 +211,12 @@ final class KafkaProducerSpec extends BaseKafkaSpec { producer <- KafkaProducer.stream(producerSettings[IO]) _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) batched <- Stream - .eval(producer.produceOne(topic, toProduce._1, toProduce._2, passthrough)) + .eval(producer.produceOne(topic, toProduce._1, toProduce._2)) + .map(_.tupleRight(passthrough)) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result.passthrough == passthrough) + assert(result._2 == passthrough) } } @@ -228,7 +231,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { records = ProducerRecords(toProduce.map { case (key, value) => ProducerRecord(topic, key, value) - }, ()) + }) result <- Stream.eval(producer.produce_(records).flatten) } yield result).compile.lastOrError.unsafeRunSync() diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala index 74d102aae..67627dd3a 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala @@ -8,12 +8,6 @@ final class ProducerRecordsSpec extends BaseSpec { val record = ProducerRecord("topic", "key", "value") assert { - ProducerRecords - .one[Int, String, String](record, 123) - .toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, 
value = value), 123)" && - ProducerRecords - .one[Int, String, String](record, 123) - .show == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), 123)" && ProducerRecords .one[String, String](record) .toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" && @@ -26,18 +20,9 @@ final class ProducerRecordsSpec extends BaseSpec { it("should be able to create with multiple records") { val records = List(ProducerRecord("topic", "key", "value")) assert { - ProducerRecords[List, Int, String, String](records, 123).toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), 123)" && - ProducerRecords[List, Int, String, String](records, 123).show == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), 123)" && ProducerRecords[List, String, String](records).toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" && ProducerRecords[List, String, String](records).show == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" } } - - it("should be able to create with passthrough only") { - assert { - ProducerRecords[List, Int, String, String](Nil, 123).toString == "ProducerRecords(, 123)" && - ProducerRecords[List, Int, String, String](Nil, 123).show == "ProducerRecords(, 123)" - } - } } } diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala index c7bc53da8..25264c8f9 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala @@ -11,8 +11,8 @@ final class ProducerResultSpec extends BaseSpec { val empty: Chunk[(ProducerRecord[String, String], RecordMetadata)] = Chunk.empty assert { - ProducerResult(empty, 123).toString == "ProducerResult(, 123)" && - ProducerResult(empty, 123).show == ProducerResult(empty, 123).toString + ProducerResult(empty).toString == "ProducerResult()" && + ProducerResult(empty).show == ProducerResult(empty).toString } val one: Chunk[(ProducerRecord[String, String], RecordMetadata)] = @@ -25,8 +25,8 @@ final class ProducerResultSpec extends BaseSpec { ) assert { - ProducerResult(one, 123).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])), 123)" && - ProducerResult(one, 123).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])), 123)" + ProducerResult(one).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])))" && + ProducerResult(one).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])))" } val two: Chunk[(ProducerRecord[String, String], RecordMetadata)] = @@ -38,8 +38,8 @@ final class ProducerResultSpec extends BaseSpec { ) assert { - ProducerResult(two, 123).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" && - ProducerResult(two, 123).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), 
topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" + ProducerResult(two).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" && + ProducerResult(two).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" } } } diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 928b9bb0a..6d955e186 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -74,32 +74,33 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { ) ) _ <- Stream.eval(IO(producer.toString should startWith("TransactionalKafkaProducer$"))) - records <- Stream.chunk(Chunk.seq(toProduce)).zipWithIndex.map { + (records, passthrough) <- Stream.chunk(Chunk.seq(toProduce)).zipWithIndex.map { case ((key, value), i) => val record = ProducerRecord(topic, key, value) makeOffset.fold[ Either[ - ProducerRecords[(String, String), String, String], - TransactionalProducerRecords[IO, (String, String), String, String] + ProducerRecords[String, String], + TransactionalProducerRecords[IO, String, String] ] - ](Left(ProducerRecords.one(record, (key, value))))( + ](Left(ProducerRecords.one(record)))( offset => Right( TransactionalProducerRecords.one( CommittableProducerRecords.one( record, offset(i) - ), - (key, value) + ) ) ) - ) + ) -> (key, value) } passthrough <- Stream - .eval(records.fold(producer.produceWithoutOffsets, producer.produce)) - .map(_.passthrough) + .eval( + records.fold(producer.produceWithoutOffsets, producer.produce).tupleRight(passthrough) + ) + .map(_._2) .buffer(toProduce.size) } yield passthrough).compile.toVector.unsafeRunSync() @@ -174,25 +175,24 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { record, offset ) - }, - toPassthrough + } ) - producer.produce(records) + producer.produce(records).tupleRight(toPassthrough) case None => - val records = ProducerRecords(recordsToProduce, toPassthrough) - producer.produceWithoutOffsets(records) + val records = ProducerRecords(recordsToProduce) + producer.produceWithoutOffsets(records).tupleRight(toPassthrough) } result <- Stream.eval(produce) } yield result).compile.lastOrError.unsafeRunSync() val records = - produced.records.map { + produced._1.records.map { case (record, _) => record.key -> record.value } - assert(records == toProduce && produced.passthrough == toPassthrough) + assert(records == toProduce && produced._2 == toPassthrough) val consumed = { val customConsumerProperties = @@ -325,10 +325,9 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { NonEmptyList.one(record), offset ) - }, - toPassthrough + } ) - result <- Stream.eval(producer.produce(records).attempt) + result <- Stream.eval(producer.produce(records).tupleRight(toPassthrough).attempt) } yield result).compile.lastOrError.unsafeRunSync() produced shouldBe Left(error) diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala 
b/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala index fc447344d..52e695d29 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala @@ -20,9 +20,6 @@ class TransactionalProducerRecordsSpec extends BaseSpec { ) assert { - TransactionalProducerRecords - .one(CommittableProducerRecords.one(record, offset), 123) - .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), CommittableOffset(topic-1 -> 1, the-group)), 123)" && TransactionalProducerRecords .one(CommittableProducerRecords.one(record, offset)) .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), CommittableOffset(topic-1 -> 1, the-group)), ())" @@ -43,9 +40,6 @@ class TransactionalProducerRecordsSpec extends BaseSpec { ) assert { - TransactionalProducerRecords - .one(CommittableProducerRecords(records, offset), 123) - .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ProducerRecord(topic = topic2, key = key2, value = value2), CommittableOffset(topic-1 -> 1, the-group)), 123)" && TransactionalProducerRecords .one(CommittableProducerRecords(records, offset)) .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ProducerRecord(topic = topic2, key = key2, value = value2), CommittableOffset(topic-1 -> 1, the-group)), ())" @@ -54,7 +48,6 @@ class TransactionalProducerRecordsSpec extends BaseSpec { it("should be able to create with zero records") { assert { - TransactionalProducerRecords[IO, Int, String, String](Chunk.empty, 123).toString == "TransactionalProducerRecords(, 123)" && TransactionalProducerRecords[IO, String, String](Chunk.empty).toString == "TransactionalProducerRecords(, ())" } } diff --git a/scalafix/rules/src/main/scala/fix/Fs2Kafka.scala b/scalafix/rules/src/main/scala/fix/Fs2Kafka.scala index 0b721e26d..04c44a449 100644 --- a/scalafix/rules/src/main/scala/fix/Fs2Kafka.scala +++ b/scalafix/rules/src/main/scala/fix/Fs2Kafka.scala @@ -44,7 +44,7 @@ class Fs2Kafka extends SemanticRule("Fs2Kafka") { // ProducerRecords.one[K, V, P] -> ProducerRecords.one[P, K, V] case term @ Term.ApplyType(ProducerRecords_one_M(fun), List(k, v, p)) => Patch.replaceTree(term, s"${fun.syntax}[$p, $k, $v]") - // ProducerResult[K, V, P] -> ProducerResult[P, K, V] + // ProducerResult[K, V, P] -> ProducerResult[K, V] case term @ Type.Apply(ProducerResult_M(fun), List(k, v, p)) => Patch.replaceTree(term, s"${fun.syntax}[$p, $k, $v]") case term @ Term.ApplyType(ProducerResult_M(fun), List(k, v, p)) => From 9d2474fa2401365b6522508b9f0d9bc8bdf95cfd Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 14:48:30 +0000 Subject: [PATCH 054/162] Add syntax for instantiating ProducerRecords --- .../scala/fs2/kafka/ProducerRecords.scala | 60 ++----------------- 1 file changed, 4 insertions(+), 56 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala index 09ea871a0..4231693fa 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala @@ -6,56 +6,20 @@ package fs2.kafka -import cats.{Show, Traverse} -import cats.syntax.show._ import fs2.Chunk 
-import fs2.kafka.internal.syntax._ import scala.collection.mutable - -/** - * [[ProducerRecords]] represents zero or more `ProducerRecord`s, - * together with an arbitrary passthrough value, all of which can - * be used with [[KafkaProducer]]. [[ProducerRecords]]s can be - * created using one of the following options.
- *
- * - `ProducerRecords#apply` to produce zero or more records - * and then emit a [[ProducerResult]] with the results and - * specified passthrough value.
- * - `ProducerRecords#one` to produce exactly one record and - * then emit a [[ProducerResult]] with the result and specified - * passthrough value.
- *
- * The [[passthrough]] and [[records]] can be retrieved from an - * existing [[ProducerRecords]] instance.
- */ -sealed abstract class ProducerRecords[+K, +V] { - - /** The records to produce. Can be empty for passthrough-only. */ - def records: Chunk[ProducerRecord[K, V]] -} +import cats.Traverse object ProducerRecords { - private[this] final class ProducerRecordsImpl[+K, +V]( - override val records: Chunk[ProducerRecord[K, V]] - ) extends ProducerRecords[K, V] { - override def toString: String = - if (records.isEmpty) s"ProducerRecords()" - else records.mkString("ProducerRecords(", ", ", ")") - } - /** - * Creates a new [[ProducerRecords]] for producing zero or more - * `ProducerRecords`s, then emitting a [[ProducerResult]] with - * the results and specified passthrough value. - */ def apply[F[+_], K, V]( records: F[ProducerRecord[K, V]] )( implicit F: Traverse[F] ): ProducerRecords[K, V] = { val numRecords = F.size(records).toInt - val chunk = if (numRecords <= 1) { + if (numRecords <= 1) { F.get(records)(0) match { case None => Chunk.empty[ProducerRecord[K, V]] case Some(record) => Chunk.singleton(record) @@ -69,25 +33,9 @@ object ProducerRecords { } Chunk.array(buf.toArray) } - new ProducerRecordsImpl(chunk) } - /** - * Creates a new [[ProducerRecords]] for producing exactly one - * `ProducerRecord`, then emitting a [[ProducerResult]] with - * the result and specified passthrough value. - */ - def one[K, V]( - record: ProducerRecord[K, V] - ): ProducerRecords[K, V] = - new ProducerRecordsImpl(Chunk.singleton(record)) + def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = + Chunk.singleton(record) - implicit def producerRecordsShow[K, V]( - implicit - K: Show[K], - V: Show[V] - ): Show[ProducerRecords[K, V]] = Show.show { records => - if (records.records.isEmpty) show"ProducerRecords(})" - else records.records.mkStringShow("ProducerRecords(", ", ", ")") - } } From f63807bcd60183be33f5a5144b6e88d252a70fc3 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 14:49:14 +0000 Subject: [PATCH 055/162] ProducerRecords is an alias for Chunk --- modules/core/src/main/scala/fs2/kafka/package.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 6109c6df7..65929d1eb 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -48,6 +48,8 @@ package object kafka { type KafkaByteProducerRecord = org.apache.kafka.clients.producer.ProducerRecord[Array[Byte], Array[Byte]] + type ProducerRecords[K, V] = Chunk[ProducerRecord[K, V]] + /** * Commits offsets in batches of every `n` offsets or time window * of length `d`, whichever happens first. 
If there are no offsets From ccde853c8aafb8504ac45c245666beaff5cff744 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 14:50:15 +0000 Subject: [PATCH 056/162] Fix calls to `records` --- .../main/scala/fs2/kafka/KafkaProducer.scala | 2 +- .../kafka/TransactionalKafkaProducer.scala | 2 +- .../scala/fs2/kafka/ProducerRecordsSpec.scala | 28 ------------------- 3 files changed, 2 insertions(+), 30 deletions(-) delete mode 100644 modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index f3d50a8da..81691c561 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -148,7 +148,7 @@ object KafkaProducer { records: ProducerRecords[K, V] ): F[F[ProducerResult[K, V]]] = withProducer { (producer, blocking) => - records.records + records .traverse(produceRecord(keySerializer, valueSerializer, producer, blocking)) .map(_.sequence.map(ProducerResult(_))) } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 9708a2fb9..294e443f5 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -131,7 +131,7 @@ object TransactionalKafkaProducer { override def produceWithoutOffsets( records: ProducerRecords[K, V] ): F[ProducerResult[K, V]] = - produceTransaction(records.records, None).map(ProducerResult(_)) + produceTransaction(records, None).map(ProducerResult(_)) private[this] def produceTransaction[P]( records: Chunk[ProducerRecord[K, V]], diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala deleted file mode 100644 index 67627dd3a..000000000 --- a/modules/core/src/test/scala/fs2/kafka/ProducerRecordsSpec.scala +++ /dev/null @@ -1,28 +0,0 @@ -package fs2.kafka - -import cats.syntax.all._ - -final class ProducerRecordsSpec extends BaseSpec { - describe("ProducerRecords") { - it("should be able to create with one record") { - val record = ProducerRecord("topic", "key", "value") - - assert { - ProducerRecords - .one[String, String](record) - .toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" && - ProducerRecords - .one[String, String](record) - .show == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" - } - } - - it("should be able to create with multiple records") { - val records = List(ProducerRecord("topic", "key", "value")) - assert { - ProducerRecords[List, String, String](records).toString == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" && - ProducerRecords[List, String, String](records).show == "ProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ())" - } - } - } -} From be776d79e0245b8061c05dff727eb1e24d2cfbb0 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 14:59:34 +0000 Subject: [PATCH 057/162] TransactionalProducerRecords is an alias --- docs/src/main/mdoc/transactions.md | 1 - .../kafka/TransactionalKafkaProducer.scala | 8 +-- .../kafka/TransactionalProducerRecords.scala | 60 ++----------------- .../src/main/scala/fs2/kafka/package.scala | 2 + .../TransactionalKafkaProducerSpec.scala | 35 +++++------ 
.../TransactionalProducerRecordsSpec.scala | 55 ----------------- 6 files changed, 25 insertions(+), 136 deletions(-) delete mode 100644 modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala diff --git a/docs/src/main/mdoc/transactions.md b/docs/src/main/mdoc/transactions.md index 448dd062b..9f127b2bf 100644 --- a/docs/src/main/mdoc/transactions.md +++ b/docs/src/main/mdoc/transactions.md @@ -58,7 +58,6 @@ object Main extends IOApp { } } .groupWithin(500, 15.seconds) - .map(TransactionalProducerRecords(_)) .evalMap(producer.produce) } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 294e443f5..5ce08769a 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -103,12 +103,12 @@ object TransactionalKafkaProducer { .map(ProducerResult(_)) private[this] def produceTransactionWithOffsets[P]( - records: TransactionalProducerRecords[F, K, V] + records: Chunk[CommittableProducerRecords[F, K, V]] ): F[Chunk[(ProducerRecord[K, V], RecordMetadata)]] = - if (records.records.isEmpty) F.pure(Chunk.empty) + if (records.isEmpty) F.pure(Chunk.empty) else { val batch = - CommittableOffsetBatch.fromFoldableMap(records.records)(_.offset) + CommittableOffsetBatch.fromFoldableMap(records)(_.offset) val consumerGroupId = if (batch.consumerGroupIdsMissing || batch.consumerGroupIds.size != 1) @@ -124,7 +124,7 @@ object TransactionalKafkaProducer { ) } - produceTransaction(records.records.flatMap(_.records), Some(sendOffsets)) + produceTransaction(records.flatMap(_.records), Some(sendOffsets)) } } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala index d14e4c3a7..f275b01dd 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala @@ -6,51 +6,14 @@ package fs2.kafka -import cats.Show -import cats.syntax.show._ import fs2.Chunk -import fs2.kafka.internal.syntax._ - -/** - * Represents zero or more [[CommittableProducerRecords]], together with - * arbitrary passthrough value, all of which can be used together with a - * [[TransactionalKafkaProducer]] to produce records and commit offsets - * within a single transaction.
- *
- * [[TransactionalProducerRecords]]s can be created using one of the - * following options.
- *
- * - `TransactionalProducerRecords#apply` to produce zero or more records, - * commit the offsets, and then emit a [[ProducerResult]] with the results - * and specified passthrough value.
- * - `TransactionalProducerRecords#one` to produce zero or more records, - * commit exactly one offset, then emit a [[ProducerResult]] with the - * results and specified passthrough value. - */ -sealed abstract class TransactionalProducerRecords[F[_], +K, +V] { - - /** The records to produce and commit. Can be empty for passthrough-only. */ - def records: Chunk[CommittableProducerRecords[F, K, V]] -} object TransactionalProducerRecords { - private[this] final class TransactionalProducerRecordsImpl[F[_], +K, +V]( - override val records: Chunk[CommittableProducerRecords[F, K, V]] - ) extends TransactionalProducerRecords[F, K, V] { - override def toString: String = - if (records.isEmpty) s"TransactionalProducerRecords()" - else records.mkString("TransactionalProducerRecords(", ", ", ")") - } - /** - * Creates a new [[TransactionalProducerRecords]] for producing zero or - * more [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the results and `Unit` passthrough value. - */ + @deprecated("this is now an identity operation", "3.0.0-M5") def apply[F[_], K, V]( - records: Chunk[CommittableProducerRecords[F, K, V]] - ): TransactionalProducerRecords[F, K, V] = - new TransactionalProducerRecordsImpl(records) + chunk: Chunk[CommittableProducerRecords[F, K, V]] + ): Chunk[CommittableProducerRecords[F, K, V]] = chunk /** * Creates a new [[TransactionalProducerRecords]] for producing exactly @@ -60,21 +23,6 @@ object TransactionalProducerRecords { def one[F[_], K, V]( record: CommittableProducerRecords[F, K, V] ): TransactionalProducerRecords[F, K, V] = - apply(Chunk.singleton(record)) + Chunk.singleton(record) - implicit def transactionalProducerRecordsShow[F[_], P, K, V]( - implicit - K: Show[K], - V: Show[V] - ): Show[TransactionalProducerRecords[F, K, V]] = - Show.show { records => - if (records.records.isEmpty) - show"TransactionalProducerRecords()" - else - records.records.mkStringShow( - "TransactionalProducerRecords(", - ", ", - ")" - ) - } } diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 65929d1eb..c902a3810 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -50,6 +50,8 @@ package object kafka { type ProducerRecords[K, V] = Chunk[ProducerRecord[K, V]] + type TransactionalProducerRecords[F[_], +K, +V] = Chunk[CommittableProducerRecords[F, K, V]] + /** * Commits offsets in batches of every `n` offsets or time window * of length `d`, whichever happens first. 
If there are no offsets diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 6d955e186..0f8c349fd 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -168,7 +168,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { val offsets = toProduce.mapWithIndex { case (_, i) => offset(i) } - val records = TransactionalProducerRecords( + val records = recordsToProduce.zip(offsets).map { case (record, offset) => CommittableProducerRecords.one( @@ -176,7 +176,6 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { offset ) } - ) producer.produce(records).tupleRight(toPassthrough) case None => val records = ProducerRecords(recordsToProduce) @@ -234,15 +233,13 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { _ => IO.unit ) } - records = TransactionalProducerRecords( - recordsToProduce.zip(offsets).map { - case (record, offset) => - CommittableProducerRecords.one( - record, - offset - ) - } - ) + records = recordsToProduce.zip(offsets).map { + case (record, offset) => + CommittableProducerRecords.one( + record, + offset + ) + } _ <- Stream .eval(producer.produce(records)) .concurrently( @@ -318,15 +315,13 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { _ => IO.unit ) } - records = TransactionalProducerRecords( - Chunk.seq(recordsToProduce.zip(offsets)).map { - case (record, offset) => - CommittableProducerRecords( - NonEmptyList.one(record), - offset - ) - } - ) + records = Chunk.seq(recordsToProduce.zip(offsets)).map { + case (record, offset) => + CommittableProducerRecords( + NonEmptyList.one(record), + offset + ) + } result <- Stream.eval(producer.produce(records).tupleRight(toPassthrough).attempt) } yield result).compile.lastOrError.unsafeRunSync() diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala deleted file mode 100644 index 52e695d29..000000000 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalProducerRecordsSpec.scala +++ /dev/null @@ -1,55 +0,0 @@ -package fs2.kafka - -import cats.effect.IO -import cats.instances.list._ -import org.apache.kafka.clients.consumer.OffsetAndMetadata -import org.apache.kafka.common.TopicPartition -import fs2.Chunk - -class TransactionalProducerRecordsSpec extends BaseSpec { - describe("TransactionalProducerRecords") { - it("should be able to create with one record") { - val record = ProducerRecord("topic", "key", "value") - - val offset = - CommittableOffset[IO]( - new TopicPartition("topic", 1), - new OffsetAndMetadata(1), - Some("the-group"), - _ => IO.unit - ) - - assert { - TransactionalProducerRecords - .one(CommittableProducerRecords.one(record, offset)) - .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), CommittableOffset(topic-1 -> 1, the-group)), ())" - } - } - - it("should be able to create with multiple records") { - val records = List( - ProducerRecord("topic", "key", "value"), - ProducerRecord("topic2", "key2", "value2") - ) - - val offset = CommittableOffset[IO]( - new TopicPartition("topic", 1), - new OffsetAndMetadata(1), - Some("the-group"), - _ => IO.unit - ) - - assert { - 
TransactionalProducerRecords - .one(CommittableProducerRecords(records, offset)) - .toString == "TransactionalProducerRecords(CommittableProducerRecords(ProducerRecord(topic = topic, key = key, value = value), ProducerRecord(topic = topic2, key = key2, value = value2), CommittableOffset(topic-1 -> 1, the-group)), ())" - } - } - - it("should be able to create with zero records") { - assert { - TransactionalProducerRecords[IO, String, String](Chunk.empty).toString == "TransactionalProducerRecords(, ())" - } - } - } -} From 0180965a20d5e34900d1b0ca2f11aae3631f625f Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 15:04:08 +0000 Subject: [PATCH 058/162] tuple passthough on the left rather than right --- .../test/scala/fs2/kafka/KafkaProducerSpec.scala | 16 ++++++++-------- .../kafka/TransactionalKafkaProducerSpec.scala | 10 +++++----- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index bd23c6312..7bba87d68 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -111,10 +111,10 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer.stream(producerSettings[IO]) records = ProducerRecords(Nil) - result <- Stream.eval(producer.produce(records).flatten.tupleRight(passthrough)) + result <- Stream.eval(producer.produce(records).flatten.tupleLeft(passthrough)) } yield result).compile.lastOrError.unsafeRunSync() - assert(result._2 == passthrough) + assert(result._1 == passthrough) } } @@ -127,11 +127,11 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer.stream(producerSettings[IO]) result <- Stream.eval { - producer.produce(ProducerRecords(Nil)).flatten.tupleRight(passthrough) + producer.produce(ProducerRecords(Nil)).flatten.tupleLeft(passthrough) } } yield result).compile.lastOrError.unsafeRunSync() - assert(result._2 == passthrough) + assert(result._1 == passthrough) } } @@ -191,12 +191,12 @@ final class KafkaProducerSpec extends BaseKafkaSpec { .eval( producer .produceOne(ProducerRecord(topic, toProduce._1, toProduce._2)) - .map(_.tupleRight(passthrough)) + .map(_.tupleLeft(passthrough)) ) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result._2 == passthrough) + assert(result._1 == passthrough) } } @@ -212,11 +212,11 @@ final class KafkaProducerSpec extends BaseKafkaSpec { _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) batched <- Stream .eval(producer.produceOne(topic, toProduce._1, toProduce._2)) - .map(_.tupleRight(passthrough)) + .map(_.tupleLeft(passthrough)) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result._2 == passthrough) + assert(result._1 == passthrough) } } diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 0f8c349fd..f8867156a 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -176,22 +176,22 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { offset ) } - producer.produce(records).tupleRight(toPassthrough) + producer.produce(records).tupleLeft(toPassthrough) case None => val records 
= ProducerRecords(recordsToProduce) - producer.produceWithoutOffsets(records).tupleRight(toPassthrough) + producer.produceWithoutOffsets(records).tupleLeft(toPassthrough) } result <- Stream.eval(produce) } yield result).compile.lastOrError.unsafeRunSync() val records = - produced._1.records.map { + produced._2.records.map { case (record, _) => record.key -> record.value } - assert(records == toProduce && produced._2 == toPassthrough) + assert(records == toProduce && produced._1 == toPassthrough) val consumed = { val customConsumerProperties = @@ -322,7 +322,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { offset ) } - result <- Stream.eval(producer.produce(records).tupleRight(toPassthrough).attempt) + result <- Stream.eval(producer.produce(records).tupleLeft(toPassthrough).attempt) } yield result).compile.lastOrError.unsafeRunSync() produced shouldBe Left(error) From 6694db06982564ba08872f6abf7a1027ef47c272 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sun, 13 Mar 2022 15:45:09 +0000 Subject: [PATCH 059/162] Fix ProduceResult show and test --- modules/core/src/main/scala/fs2/kafka/ProducerResult.scala | 2 +- .../core/src/test/scala/fs2/kafka/ProducerResultSpec.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala b/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala index 88d645883..60c2f9641 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala @@ -80,7 +80,7 @@ object ProducerResult { }( start = "ProducerResult(", sep = ", ", - end = show")" + end = ")" ) } } diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala index 25264c8f9..0d15f1b1e 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala @@ -38,8 +38,8 @@ final class ProducerResultSpec extends BaseSpec { ) assert { - ProducerResult(two).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" && - ProducerResult(two).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value), 123)" + ProducerResult(two).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value))" && + ProducerResult(two).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value))" } } } From 61493123ae71d883c143e946cd54afd8450f9532 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 14 Mar 2022 08:15:18 +0100 Subject: [PATCH 060/162] Update logback-classic to 1.2.11 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 023656c76..ad4c9be4a 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val dependencySettings = Seq( "org.typelevel" %% 
"discipline-scalatest" % "2.1.5", "org.typelevel" %% "cats-effect-laws" % catsEffectVersion, "org.typelevel" %% "cats-effect-testkit" % catsEffectVersion, - "ch.qos.logback" % "logback-classic" % "1.2.10" + "ch.qos.logback" % "logback-classic" % "1.2.11" ).map(_ % Test), libraryDependencies ++= { if (scalaVersion.value.startsWith("3")) Nil From c2797ef22745e87cc44677b33179d13608a1a207 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 14 Mar 2022 08:15:27 +0100 Subject: [PATCH 061/162] Update cats-effect-laws, ... to 3.3.7 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 023656c76..72823f3ad 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.3.6" +val catsEffectVersion = "3.3.7" val catsVersion = "2.6.1" From 70c2b13f5072b5c56a8d16a414737211119dae67 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 08:54:42 +0000 Subject: [PATCH 062/162] Remove pipeWithPassthrough --- docs/src/main/mdoc/quick-example.md | 13 +++++++++---- .../src/main/scala/fs2/kafka/KafkaProducer.scala | 15 --------------- .../main/scala/fs2/kafka/ProducerSettings.scala | 3 ++- 3 files changed, 11 insertions(+), 20 deletions(-) diff --git a/docs/src/main/mdoc/quick-example.md b/docs/src/main/mdoc/quick-example.md index a02fe7e94..25512c365 100644 --- a/docs/src/main/mdoc/quick-example.md +++ b/docs/src/main/mdoc/quick-example.md @@ -39,10 +39,15 @@ object Main extends IOApp { val record = ProducerRecord("topic", key, value) committable.offset -> ProducerRecords.one(record) } - } - .through(KafkaProducer.pipeWithPassthrough(producerSettings)) - .map(_._1) - .through(commitBatchWithin(500, 15.seconds)) + }.through { offsetsAndProducerRecords => + KafkaProducer.stream(producerSettings).flatMap { producer => + offsetsAndProducerRecords.evalMap { + case (offset, producerRecord) => + producer.produce(producerRecord) + .map(_.as(offset)) + }.parEvalMap(ProducerSettings.DefaultParallelism)(identity) + } + }.through(commitBatchWithin(500, 15.seconds)) stream.compile.drain.as(ExitCode.Success) } diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 81691c561..5cd7f3bd6 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -212,14 +212,6 @@ object KafkaProducer { ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = records => stream(settings).flatMap(pipe(settings, _).apply(records)) - def pipeWithPassthrough[F[_], K, V, P]( - settings: ProducerSettings[F, K, V] - )( - implicit F: Async[F], - mk: MkProducer[F] - ): Pipe[F, (P, ProducerRecords[K, V]), (P, ProducerResult[K, V])] = - records => stream(settings).flatMap(pipeWithPassthrough(settings, _).apply(records)) - /** * Produces records in batches using the provided [[KafkaProducer]]. 
* The number of records in the same batch is limited using the @@ -231,13 +223,6 @@ object KafkaProducer { ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = _.evalMap(producer.produce).mapAsync(settings.parallelism)(identity) - def pipeWithPassthrough[F[_]: Concurrent, K, V, P]( - settings: ProducerSettings[F, K, V], - producer: KafkaProducer[F, K, V] - ): Pipe[F, (P, ProducerRecords[K, V]), (P, ProducerResult[K, V])] = - _.evalMap { case (p, records) => producer.produce(records).map(_.tupleLeft(p)) } - .mapAsync(settings.parallelism)(identity) - private[this] def serializeToBytes[F[_], K, V]( keySerializer: Serializer[F, K], valueSerializer: Serializer[F, V], diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index fd7e30b14..03fe8a740 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -234,6 +234,7 @@ sealed abstract class ProducerSettings[F[_], K, V] { } object ProducerSettings { + val DefaultParallelism: Int = 10000 private[this] final case class ProducerSettingsImpl[F[_], K, V]( override val keySerializer: F[Serializer[F, K]], override val valueSerializer: F[Serializer[F, V]], @@ -323,7 +324,7 @@ object ProducerSettings { ProducerConfig.RETRIES_CONFIG -> "0" ), closeTimeout = 60.seconds, - parallelism = 10000 + parallelism = DefaultParallelism ) def apply[F[_], K, V]( From f45db3f80d5936c566ba253b6ea24365f45f54d1 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 09:06:19 +0000 Subject: [PATCH 063/162] Moave ProducerRecords constructors to package object --- .../scala/fs2/kafka/ProducerRecords.scala | 41 -------------- .../kafka/TransactionalProducerRecords.scala | 28 ---------- .../src/main/scala/fs2/kafka/package.scala | 54 +++++++++++++++++++ 3 files changed, 54 insertions(+), 69 deletions(-) delete mode 100644 modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala delete mode 100644 modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala deleted file mode 100644 index 4231693fa..000000000 --- a/modules/core/src/main/scala/fs2/kafka/ProducerRecords.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018-2022 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka - -import fs2.Chunk - -import scala.collection.mutable -import cats.Traverse - -object ProducerRecords { - - def apply[F[+_], K, V]( - records: F[ProducerRecord[K, V]] - )( - implicit F: Traverse[F] - ): ProducerRecords[K, V] = { - val numRecords = F.size(records).toInt - if (numRecords <= 1) { - F.get(records)(0) match { - case None => Chunk.empty[ProducerRecord[K, V]] - case Some(record) => Chunk.singleton(record) - } - } else { - val buf = new mutable.ArrayBuffer[ProducerRecord[K, V]](numRecords) - F.foldLeft(records, ()) { - case (_, record) => - buf += record - () - } - Chunk.array(buf.toArray) - } - } - - def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = - Chunk.singleton(record) - -} diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala deleted file mode 100644 index f275b01dd..000000000 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerRecords.scala +++ /dev/null @@ -1,28 +0,0 @@ -/* - * 
Copyright 2018-2022 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka - -import fs2.Chunk - -object TransactionalProducerRecords { - - @deprecated("this is now an identity operation", "3.0.0-M5") - def apply[F[_], K, V]( - chunk: Chunk[CommittableProducerRecords[F, K, V]] - ): Chunk[CommittableProducerRecords[F, K, V]] = chunk - - /** - * Creates a new [[TransactionalProducerRecords]] for producing exactly - * one [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the result and specified passthrough value. - */ - def one[F[_], K, V]( - record: CommittableProducerRecords[F, K, V] - ): TransactionalProducerRecords[F, K, V] = - Chunk.singleton(record) - -} diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index c902a3810..42daca580 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -6,7 +6,11 @@ package fs2 +import fs2.Chunk +import cats.Traverse import cats.effect._ + +import scala.collection.mutable import scala.concurrent.duration.FiniteDuration package object kafka { @@ -63,3 +67,53 @@ package object kafka { ): Pipe[F, CommittableOffset[F], Unit] = _.groupWithin(n, d).evalMap(CommittableOffsetBatch.fromFoldable(_).commit) } +package kafka { + + object ProducerRecords { + + def apply[F[+_], K, V]( + records: F[ProducerRecord[K, V]] + )( + implicit F: Traverse[F] + ): ProducerRecords[K, V] = { + val numRecords = F.size(records).toInt + if (numRecords <= 1) { + F.get(records)(0) match { + case None => Chunk.empty[ProducerRecord[K, V]] + case Some(record) => Chunk.singleton(record) + } + } else { + val buf = new mutable.ArrayBuffer[ProducerRecord[K, V]](numRecords) + F.foldLeft(records, ()) { + case (_, record) => + buf += record + () + } + Chunk.array(buf.toArray) + } + } + + def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = + Chunk.singleton(record) + + } + + object TransactionalProducerRecords { + + @deprecated("this is now an identity operation", "3.0.0-M5") + def apply[F[_], K, V]( + chunk: Chunk[CommittableProducerRecords[F, K, V]] + ): Chunk[CommittableProducerRecords[F, K, V]] = chunk + + /** + * Creates a new [[TransactionalProducerRecords]] for producing exactly + * one [[CommittableProducerRecords]], emitting a [[ProducerResult]] + * with the result and specified passthrough value. 
+ */ + def one[F[_], K, V]( + record: CommittableProducerRecords[F, K, V] + ): TransactionalProducerRecords[F, K, V] = + Chunk.singleton(record) + + } +} From 4d659c8923680dc1ae55a3a669b5b53368e9a1b4 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 10:22:07 +0000 Subject: [PATCH 064/162] Unlimited parallelism on waiting for producer results --- docs/src/main/mdoc/producers.md | 2 +- docs/src/main/mdoc/quick-example.md | 2 +- .../main/scala/fs2/kafka/KafkaProducer.scala | 8 +++---- .../scala/fs2/kafka/ProducerSettings.scala | 22 ++----------------- 4 files changed, 7 insertions(+), 27 deletions(-) diff --git a/docs/src/main/mdoc/producers.md b/docs/src/main/mdoc/producers.md index 359ca225d..a9a97031c 100644 --- a/docs/src/main/mdoc/producers.md +++ b/docs/src/main/mdoc/producers.md @@ -206,7 +206,7 @@ object PartitionedProduceExample extends IOApp { val record = ProducerRecord("topic", key, value) ProducerRecords.one(record) } - .through(KafkaProducer.pipe(producerSettings, producer)) + .through(KafkaProducer.pipe(producer)) } .parJoinUnbounded } diff --git a/docs/src/main/mdoc/quick-example.md b/docs/src/main/mdoc/quick-example.md index 25512c365..3a3fdace2 100644 --- a/docs/src/main/mdoc/quick-example.md +++ b/docs/src/main/mdoc/quick-example.md @@ -45,7 +45,7 @@ object Main extends IOApp { case (offset, producerRecord) => producer.produce(producerRecord) .map(_.as(offset)) - }.parEvalMap(ProducerSettings.DefaultParallelism)(identity) + }.parEvalMap(Int.MaxValue)(identity) } }.through(commitBatchWithin(500, 15.seconds)) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 5cd7f3bd6..af46fc525 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -201,8 +201,7 @@ object KafkaProducer { /** * Creates a [[KafkaProducer]] using the provided settings and - * produces record in batches, limiting the number of records - * in the same batch using [[ProducerSettings#parallelism]]. + * produces record in batches. */ def pipe[F[_], K, V]( settings: ProducerSettings[F, K, V] @@ -210,7 +209,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = - records => stream(settings).flatMap(pipe(settings, _).apply(records)) + records => stream(settings).flatMap(pipe(_).apply(records)) /** * Produces records in batches using the provided [[KafkaProducer]]. @@ -218,10 +217,9 @@ object KafkaProducer { * [[ProducerSettings#parallelism]] setting. */ def pipe[F[_]: Concurrent, K, V]( - settings: ProducerSettings[F, K, V], producer: KafkaProducer[F, K, V] ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = - _.evalMap(producer.produce).mapAsync(settings.parallelism)(identity) + _.evalMap(producer.produce).parEvalMap(Int.MaxValue)(identity) private[this] def serializeToBytes[F[_], K, V]( keySerializer: Serializer[F, K], diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index 03fe8a740..b40c1cf37 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -215,18 +215,6 @@ sealed abstract class ProducerSettings[F[_], K, V] { */ def withCloseTimeout(closeTimeout: FiniteDuration): ProducerSettings[F, K, V] - /** - * The maximum number of [[ProducerRecords]] to produce in the same batch.
- *
- * The default value is 10000. - */ - def parallelism: Int - - /** - * Creates a new [[ProducerSettings]] with the specified [[parallelism]]. - */ - def withParallelism(parallelism: Int): ProducerSettings[F, K, V] - /** * Includes the credentials properties from the provided [[KafkaCredentialStore]] */ @@ -234,14 +222,12 @@ sealed abstract class ProducerSettings[F[_], K, V] { } object ProducerSettings { - val DefaultParallelism: Int = 10000 private[this] final case class ProducerSettingsImpl[F[_], K, V]( override val keySerializer: F[Serializer[F, K]], override val valueSerializer: F[Serializer[F, V]], override val customBlockingContext: Option[ExecutionContext], override val properties: Map[String, String], - override val closeTimeout: FiniteDuration, - override val parallelism: Int + override val closeTimeout: FiniteDuration ) extends ProducerSettings[F, K, V] { override def withCustomBlockingContext(ec: ExecutionContext): ProducerSettings[F, K, V] = copy(customBlockingContext = Some(ec)) @@ -297,9 +283,6 @@ object ProducerSettings { override def withCloseTimeout(closeTimeout: FiniteDuration): ProducerSettings[F, K, V] = copy(closeTimeout = closeTimeout) - override def withParallelism(parallelism: Int): ProducerSettings[F, K, V] = - copy(parallelism = parallelism) - /** * Includes the credentials properties from the provided [[KafkaCredentialStore]] */ @@ -323,8 +306,7 @@ object ProducerSettings { properties = Map( ProducerConfig.RETRIES_CONFIG -> "0" ), - closeTimeout = 60.seconds, - parallelism = DefaultParallelism + closeTimeout = 60.seconds ) def apply[F[_], K, V]( From 8e1370c17f879f20231d689176dde5773c842db1 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 10:31:43 +0000 Subject: [PATCH 065/162] Remove ProducerResult type --- .../main/scala/fs2/kafka/KafkaProducer.scala | 14 +-- .../main/scala/fs2/kafka/ProducerResult.scala | 86 ------------------- .../kafka/TransactionalKafkaProducer.scala | 3 +- .../src/main/scala/fs2/kafka/package.scala | 3 + .../kafka/KafkaProducerConnectionSpec.scala | 4 +- .../scala/fs2/kafka/KafkaProducerSpec.scala | 4 +- .../scala/fs2/kafka/ProducerResultSpec.scala | 46 ---------- .../TransactionalKafkaProducerSpec.scala | 2 +- 8 files changed, 11 insertions(+), 151 deletions(-) delete mode 100644 modules/core/src/main/scala/fs2/kafka/ProducerResult.scala delete mode 100644 modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index af46fc525..6f94afe7e 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -15,7 +15,6 @@ import scala.jdk.CollectionConverters._ import fs2.kafka.producer.MkProducer import org.apache.kafka.clients.producer.RecordMetadata import org.apache.kafka.common.{Metric, MetricName} -import fs2.Chunk import cats.Functor import scala.annotation.nowarn @@ -70,7 +69,7 @@ object KafkaProducer { */ def produceOne_(record: ProducerRecord[K, V])(implicit F: Functor[F]): F[F[RecordMetadata]] = produceOne(record).map(_.map { res => - res.records.head.get._2 //Should always be present so get is ok + res.head.get._2 //Should always be present so get is ok }) /** @@ -80,15 +79,6 @@ object KafkaProducer { def produceOne_(topic: String, key: K, value: V)(implicit F: Functor[F]): F[F[RecordMetadata]] = produceOne_(ProducerRecord(topic, key, value)) - /** - * Produces the specified [[ProducerRecords]] without a 
passthrough value, - * see [[KafkaProducer.produce]] for general semantics. - */ - def produce_( - records: ProducerRecords[K, V] - )(implicit F: Functor[F]): F[F[Chunk[(ProducerRecord[K, V], RecordMetadata)]]] = - producer.produce(records).map(_.map(_.records)) - /** * Produce a single record to the specified topic using the provided key and value, * see [[KafkaProducer.produce]] for general semantics. @@ -150,7 +140,7 @@ object KafkaProducer { withProducer { (producer, blocking) => records .traverse(produceRecord(keySerializer, valueSerializer, producer, blocking)) - .map(_.sequence.map(ProducerResult(_))) + .map(_.sequence) } override def metrics: F[Map[MetricName, Metric]] = diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala b/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala deleted file mode 100644 index 60c2f9641..000000000 --- a/modules/core/src/main/scala/fs2/kafka/ProducerResult.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2018-2022 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka - -import cats.syntax.show._ -import cats.Show -import fs2.Chunk -import fs2.kafka.instances._ -import fs2.kafka.internal.syntax._ -import org.apache.kafka.clients.producer.RecordMetadata - -/** - * [[ProducerResult]] represents the result of having produced zero - * or more `ProducerRecord`s from a [[ProducerRecords]]. Finally, a - * passthrough value and `ProducerRecord`s along with respective - * `RecordMetadata` are emitted in a [[ProducerResult]].
- *
- * The [[passthrough]] and [[records]] can be retrieved from an - * existing [[ProducerResult]] instance.
- *
- * Use [[ProducerResult#apply]] to create a new [[ProducerResult]]. - */ -sealed abstract class ProducerResult[+K, +V] { - - /** - * The records produced along with respective metadata. - * Can be empty for passthrough-only. - */ - def records: Chunk[(ProducerRecord[K, V], RecordMetadata)] -} - -object ProducerResult { - private[this] final class ProducerResultImpl[+K, +V]( - override val records: Chunk[(ProducerRecord[K, V], RecordMetadata)] - ) extends ProducerResult[K, V] { - - override def toString: String = - if (records.isEmpty) - s"ProducerResult()" - else - records.mkStringAppend { - case (append, (record, metadata)) => - append(metadata.toString) - append(" -> ") - append(record.toString) - }( - start = "ProducerResult(", - sep = ", ", - end = s")" - ) - } - - /** - * Creates a new [[ProducerResult]] for having produced zero - * or more `ProducerRecord`s, finally emitting a passthrough - * value and the `ProducerRecord`s with `RecordMetadata`. - */ - def apply[K, V]( - records: Chunk[(ProducerRecord[K, V], RecordMetadata)] - ): ProducerResult[K, V] = - new ProducerResultImpl(records) - - implicit def producerResultShow[K, V]( - implicit - K: Show[K], - V: Show[V] - ): Show[ProducerResult[K, V]] = Show.show { result => - if (result.records.isEmpty) - show"ProducerResult()" - else - result.records.mkStringAppend { - case (append, (record, metadata)) => - append(metadata.show) - append(" -> ") - append(record.show) - }( - start = "ProducerResult(", - sep = ", ", - end = ")" - ) - } -} diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 5ce08769a..05d677a05 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -100,7 +100,6 @@ object TransactionalKafkaProducer { records: TransactionalProducerRecords[F, K, V] ): F[ProducerResult[K, V]] = produceTransactionWithOffsets(records) - .map(ProducerResult(_)) private[this] def produceTransactionWithOffsets[P]( records: Chunk[CommittableProducerRecords[F, K, V]] @@ -131,7 +130,7 @@ object TransactionalKafkaProducer { override def produceWithoutOffsets( records: ProducerRecords[K, V] ): F[ProducerResult[K, V]] = - produceTransaction(records, None).map(ProducerResult(_)) + produceTransaction(records, None) private[this] def produceTransaction[P]( records: Chunk[ProducerRecord[K, V]], diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 42daca580..fcc7f019d 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -12,6 +12,7 @@ import cats.effect._ import scala.collection.mutable import scala.concurrent.duration.FiniteDuration +import org.apache.kafka.clients.producer.RecordMetadata package object kafka { type Id[+A] = A @@ -56,6 +57,8 @@ package object kafka { type TransactionalProducerRecords[F[_], +K, +V] = Chunk[CommittableProducerRecords[F, K, V]] + type ProducerResult[K, V] = Chunk[(ProducerRecord[K, V], RecordMetadata)] + /** * Commits offsets in batches of every `n` offsets or time window * of length `d`, whichever happens first. 
If there are no offsets diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala index c68c5cdd5..dc33490f3 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala @@ -26,8 +26,8 @@ final class KafkaProducerConnectionSpec extends BaseKafkaSpec with TypeCheckedTr result2 <- Stream.eval(producer2.produce(ProducerRecords.one(producerRecordInt)).flatten) } yield (result1, result2)).compile.lastOrError.unsafeRunSync() - result1.records.toList.map(_._1) should ===(List(producerRecordString)) - result2.records.toList.map(_._1) should ===(List(producerRecordInt)) + result1.toList.map(_._1) should ===(List(producerRecordString)) + result2.toList.map(_._1) should ===(List(producerRecordInt)) val consumed = consumeNumberKeyedMessagesFrom[String, String](topic, 2) diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 7bba87d68..9df664322 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -88,7 +88,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { } yield result).compile.lastOrError.unsafeRunSync() val records = - produced.records.map { + produced.map { case (record, _) => record.key -> record.value }.toList @@ -232,7 +232,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { case (key, value) => ProducerRecord(topic, key, value) }) - result <- Stream.eval(producer.produce_(records).flatten) + result <- Stream.eval(producer.produce(records).flatten) } yield result).compile.lastOrError.unsafeRunSync() val records = diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala deleted file mode 100644 index 0d15f1b1e..000000000 --- a/modules/core/src/test/scala/fs2/kafka/ProducerResultSpec.scala +++ /dev/null @@ -1,46 +0,0 @@ -package fs2.kafka - -import fs2.Chunk -import cats.syntax.all._ -import org.apache.kafka.clients.producer.RecordMetadata -import org.apache.kafka.common.TopicPartition - -final class ProducerResultSpec extends BaseSpec { - describe("ProducerResult") { - it("should have a Show instance and matching toString") { - val empty: Chunk[(ProducerRecord[String, String], RecordMetadata)] = Chunk.empty - - assert { - ProducerResult(empty).toString == "ProducerResult()" && - ProducerResult(empty).show == ProducerResult(empty).toString - } - - val one: Chunk[(ProducerRecord[String, String], RecordMetadata)] = - Chunk.singleton( - ProducerRecord("topic", "key", "value") - .withPartition(1) - .withTimestamp(0L) - .withHeaders(Headers(Header("key", Array[Byte]()))) -> - new RecordMetadata(new TopicPartition("topic", 0), 0L, 0, 0L, 0, 0) - ) - - assert { - ProducerResult(one).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])))" && - ProducerResult(one).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value, headers = Headers(key -> [])))" - } - - val two: Chunk[(ProducerRecord[String, String], RecordMetadata)] = - Chunk( - ProducerRecord("topic", "key", "value").withPartition(0).withTimestamp(0L) -> - new RecordMetadata(new TopicPartition("topic", 
0), 0L, 0, 0L, 0, 0), - ProducerRecord("topic", "key", "value").withPartition(1).withTimestamp(0L) -> - new RecordMetadata(new TopicPartition("topic", 1), 0L, 0, 0L, 0, 0) - ) - - assert { - ProducerResult(two).toString == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value))" && - ProducerResult(two).show == "ProducerResult(topic-0@0 -> ProducerRecord(topic = topic, partition = 0, timestamp = 0, key = key, value = value), topic-1@0 -> ProducerRecord(topic = topic, partition = 1, timestamp = 0, key = key, value = value))" - } - } - } -} diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index f8867156a..bf226a6ad 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -186,7 +186,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { } yield result).compile.lastOrError.unsafeRunSync() val records = - produced._2.records.map { + produced._2.map { case (record, _) => record.key -> record.value } From 8ddd0cb7f9b4dea72ec98a57716b6a10a55c4a55 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 12:05:47 +0000 Subject: [PATCH 066/162] Remove passthrough param from TransactionalKafkaProducer --- .../main/scala/fs2/kafka/TransactionalKafkaProducer.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 05d677a05..e7d8d6fcc 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -37,7 +37,7 @@ abstract class TransactionalKafkaProducer[F[_], K, V] { * occurs, the transaction is aborted. The returned effect succeeds if the whole * transaction completes successfully. 
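
An illustrative sketch, not part of the patch: with the unused `[P]` type parameter removed, a transactional produce call looks like the following. The producer, topic name and committable offset are assumed to exist, and `CommittableProducerRecords.one` is assumed from the existing fs2-kafka API for pairing a record with its offset.

```scala
// Sketch only: produce one record and commit its offset in a single transaction.
import cats.effect.IO
import fs2.kafka._

def produceAtomically(
  producer: TransactionalKafkaProducer[IO, String, String],
  offset: CommittableOffset[IO]
): IO[ProducerResult[String, String]] =
  producer.produce(
    TransactionalProducerRecords.one(
      CommittableProducerRecords.one(ProducerRecord("topic", "key", "value"), offset)
    )
  )
```
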
*/ - def produce[P]( + def produce( records: TransactionalProducerRecords[F, K, V] ): F[ProducerResult[K, V]] } @@ -96,12 +96,12 @@ object TransactionalKafkaProducer { WithTransactionalProducer(mk, settings) ).mapN { (keySerializer, valueSerializer, withProducer) => new TransactionalKafkaProducer.WithoutOffsets[F, K, V] { - override def produce[P]( + override def produce( records: TransactionalProducerRecords[F, K, V] ): F[ProducerResult[K, V]] = produceTransactionWithOffsets(records) - private[this] def produceTransactionWithOffsets[P]( + private[this] def produceTransactionWithOffsets( records: Chunk[CommittableProducerRecords[F, K, V]] ): F[Chunk[(ProducerRecord[K, V], RecordMetadata)]] = if (records.isEmpty) F.pure(Chunk.empty) @@ -132,7 +132,7 @@ object TransactionalKafkaProducer { ): F[ProducerResult[K, V]] = produceTransaction(records, None) - private[this] def produceTransaction[P]( + private[this] def produceTransaction( records: Chunk[ProducerRecord[K, V]], sendOffsets: Option[(KafkaByteProducer, Blocking[F]) => F[Unit]] ): F[Chunk[(ProducerRecord[K, V], RecordMetadata)]] = From 3b9d184083dc01f773c808debafce987008010d6 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 12:16:40 +0000 Subject: [PATCH 067/162] Remove references to passthrough --- docs/src/main/mdoc/producers.md | 4 ++-- .../src/main/scala/fs2/kafka/KafkaProducer.scala | 13 ++++--------- .../fs2/kafka/TransactionalKafkaProducer.scala | 5 +++-- modules/core/src/main/scala/fs2/kafka/package.scala | 3 +-- 4 files changed, 10 insertions(+), 15 deletions(-) diff --git a/docs/src/main/mdoc/producers.md b/docs/src/main/mdoc/producers.md index a9a97031c..3ab35f4fb 100644 --- a/docs/src/main/mdoc/producers.md +++ b/docs/src/main/mdoc/producers.md @@ -185,9 +185,9 @@ object ProduceExample extends IOApp { In the stream above, we're simply producing the records we receive back to the topic. -The `produce` function creates a `KafkaProducer` and produces records in `ProducerRecords`. Note that `ProducerRecords` support multiple records and a passthrough value, `committable.offset`. Once all records have been produced in the `ProducerRecords`, the passthrough will be emitted. +The `produce` function creates a `KafkaProducer` and produces records in `ProducerRecords`, which is al alias for `fs2.Chunk`. Once all records have been produced in the `ProducerRecords`, the inner effect will complete with a `ProducerResult`, which is an alias for `Chunk[(ProducerRecord[K, V], RecordMetadata)]`. -If we're producing in multiple places in our stream, we can create the `KafkaProducer` ourselves, and pass it to the `produce` function. Every `produce` allow up to `ProducerSettings#parallelism` instances of `ProducerRecords` to be batched together in the same batch. +If we're producing in multiple places in our stream, we can create the `KafkaProducer` ourselves, and pass it to the `pipe` function. ```scala mdoc:silent object PartitionedProduceExample extends IOApp { diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 6f94afe7e..5337def60 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -22,11 +22,7 @@ import scala.concurrent.Promise /** * [[KafkaProducer]] represents a producer of Kafka records, with the - * ability to produce `ProducerRecord`s using [[produce]]. 
Records are - * wrapped in [[ProducerRecords]] which allow an arbitrary value, that - * is a passthrough, to be included in the result. Most often this is - * used for keeping the [[CommittableOffset]]s, in order to commit - * offsets, but any value can be used as passthrough value. + * ability to produce `ProducerRecord`s using [[produce]]. */ abstract class KafkaProducer[F[_], K, V] { @@ -64,8 +60,7 @@ object KafkaProducer { extends AnyVal { /** - * Produce a single [[ProducerRecord]] without a passthrough value, - * see [[KafkaProducer.produce]] for general semantics. + * Produce a single [[ProducerRecord]], see [[KafkaProducer.produce]] for general semantics. */ def produceOne_(record: ProducerRecord[K, V])(implicit F: Functor[F]): F[F[RecordMetadata]] = produceOne(record).map(_.map { res => @@ -73,8 +68,8 @@ object KafkaProducer { }) /** - * Produce a single record to the specified topic using the provided key and value - * without a passthrough value, see [[KafkaProducer.produce]] for general semantics. + * Produce a single record to the specified topic using the provided key and value, + * see [[KafkaProducer.produce]] for general semantics. */ def produceOne_(topic: String, key: K, value: V)(implicit F: Functor[F]): F[F[RecordMetadata]] = produceOne_(ProducerRecord(topic, key, value)) diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index e7d8d6fcc..c6c705445 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -24,8 +24,9 @@ import scala.annotation.nowarn * streams, with the ability to atomically produce `ProducerRecord`s and commit * corresponding [[CommittableOffset]]s using [[produce]].
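
An illustrative sketch, not part of the patch: after the passthrough removal, the outer effect of `produce` enqueues the records and the inner effect completes with the produced records paired with their `RecordMetadata`, which is all the new `ProducerResult` alias is. The producer and records below are assumed to exist.

```scala
// Sketch only: wait for acknowledgement and collect the assigned offsets.
import cats.effect.IO
import cats.syntax.all._
import fs2.Chunk
import fs2.kafka._

def produceAndCollectOffsets(
  producer: KafkaProducer[IO, String, String],
  records: ProducerRecords[String, String]
): IO[Chunk[Long]] =
  producer.produce(records).flatten.map(_.map { case (_, metadata) => metadata.offset() })
```
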
*
- * Records are wrapped in [[TransactionalProducerRecords]] which allow an - * arbitrary passthrough value to be included in the result. + * Records are wrapped in [[TransactionalProducerRecords]], which is a chunk of + * [[CommittableProducerRecord]] which wrap zero or more records together with + * a [[CommittableOffset]]. */ abstract class TransactionalKafkaProducer[F[_], K, V] { diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index fcc7f019d..b6460759a 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -110,8 +110,7 @@ package kafka { /** * Creates a new [[TransactionalProducerRecords]] for producing exactly - * one [[CommittableProducerRecords]], emitting a [[ProducerResult]] - * with the result and specified passthrough value. + * one [[CommittableProducerRecords]] */ def one[F[_], K, V]( record: CommittableProducerRecords[F, K, V] From 2895d888f0413545be214f3a7c025fca5d712f2f Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 12:18:38 +0000 Subject: [PATCH 068/162] Update docs, simplify some tests --- docs/src/main/mdoc/producers.md | 2 -- modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala | 2 -- .../core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala | 8 ++++---- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/docs/src/main/mdoc/producers.md b/docs/src/main/mdoc/producers.md index 3ab35f4fb..5ed41e7af 100644 --- a/docs/src/main/mdoc/producers.md +++ b/docs/src/main/mdoc/producers.md @@ -130,8 +130,6 @@ The following settings are specific to the library. - `withCloseTimeout` controls the timeout when waiting for producer shutdown. Default is 60 seconds. -- `withParallelism` sets the max number of `ProducerRecords` to produce in the same batch when using the `produce` pipe. Default is 100. - - `withCreateProducer` changes how the underlying Java Kafka producer is created. The default merely creates a Java `KafkaProducer` instance using set properties, but this function allows overriding the behaviour for e.g. testing purposes. ## Producer Creation diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 5337def60..ff32e80ad 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -198,8 +198,6 @@ object KafkaProducer { /** * Produces records in batches using the provided [[KafkaProducer]]. - * The number of records in the same batch is limited using the - * [[ProducerSettings#parallelism]] setting. 
*/ def pipe[F[_]: Concurrent, K, V]( producer: KafkaProducer[F, K, V] diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 9df664322..861b9aec7 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -191,12 +191,12 @@ final class KafkaProducerSpec extends BaseKafkaSpec { .eval( producer .produceOne(ProducerRecord(topic, toProduce._1, toProduce._2)) - .map(_.tupleLeft(passthrough)) + .map(_.as(passthrough)) ) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result._1 == passthrough) + assert(result == passthrough) } } @@ -212,11 +212,11 @@ final class KafkaProducerSpec extends BaseKafkaSpec { _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) batched <- Stream .eval(producer.produceOne(topic, toProduce._1, toProduce._2)) - .map(_.tupleLeft(passthrough)) + .map(_.as(passthrough)) result <- Stream.eval(batched) } yield result).compile.lastOrError.unsafeRunSync() - assert(result._1 == passthrough) + assert(result == passthrough) } } From 5ba6f2f3878a73fee83f1bbc07e60159c8094593 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 15:35:33 +0000 Subject: [PATCH 069/162] Gen -> Generic --- .../main/scala/fs2/kafka/Deserializer.scala | 52 +++++++++---------- .../src/main/scala/fs2/kafka/Serializer.scala | 26 +++++----- .../src/main/scala/fs2/kafka/package.scala | 18 +++---- 3 files changed, 48 insertions(+), 48 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index adcde9f2b..fbddc3daa 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -16,7 +16,7 @@ import java.util.UUID * Functional composable Kafka key- and record deserializer with * support for effect types. */ -sealed abstract class GenDeserializer[-T <: KeyOrValue, F[_], A] { +sealed abstract class GenericDeserializer[-T <: KeyOrValue, F[_], A] { /** * Attempts to deserialize the specified bytes into a value of @@ -29,27 +29,27 @@ sealed abstract class GenDeserializer[-T <: KeyOrValue, F[_], A] { * Creates a new [[Deserializer]] which applies the specified * function to the result of this [[Deserializer]]. */ - def map[B](f: A => B): GenDeserializer[T, F, B] + def map[B](f: A => B): GenericDeserializer[T, F, B] /** * Creates a new [[Deserializer]] by first deserializing * with this [[Deserializer]] and then using the result * as input to the specified function. */ - def flatMap[T0 <: T, B](f: A => GenDeserializer[T0, F, B]): GenDeserializer[T0, F, B] + def flatMap[T0 <: T, B](f: A => GenericDeserializer[T0, F, B]): GenericDeserializer[T0, F, B] /** * Creates a new [[Deserializer]] which deserializes both using * this [[Deserializer]] and that [[Deserializer]], and returns * both results in a tuple. */ - def product[T0 <: T, B](that: GenDeserializer[T0, F, B]): GenDeserializer[T0, F, (A, B)] + def product[T0 <: T, B](that: GenericDeserializer[T0, F, B]): GenericDeserializer[T0, F, (A, B)] /** * Creates a new [[Deserializer]] which handles errors by * turning them into `Either` values. 
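
An illustrative sketch, not part of the patch: the `Gen*` to `Generic*` rename is intended to be invisible to most code, because the `Serializer`, `KeySerializer` and `ValueSerializer` aliases (and their deserializer counterparts) in the package object still resolve to the renamed types, and contravariance in the key/value tag lets a plain serializer stand in for either position.

```scala
// Sketch only: the package aliases after the rename.
import cats.effect.IO
import fs2.kafka._

val plain: Serializer[IO, String]        = Serializer.string[IO]
val asKey: KeySerializer[IO, String]     = plain // GenericSerializer[KeyOrValue, F, A] <: GenericSerializer[Key, F, A]
val asValue: ValueSerializer[IO, String] = plain
val intKey: KeySerializer[IO, Int]       = Serializer.string[IO].contramap[Int](_.toString)
```
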
*/ - def attempt: GenDeserializer[T, F, Either[Throwable, A]] + def attempt: GenericDeserializer[T, F, Either[Throwable, A]] /** * Creates a new [[Deserializer]] which returns `None` when the @@ -65,7 +65,7 @@ sealed abstract class GenDeserializer[-T <: KeyOrValue, F[_], A] { def suspend: Deserializer[F, A] } -object GenDeserializer { +object GenericDeserializer { def apply[F[_], A](implicit deserializer: Deserializer[F, A]): Deserializer[F, A] = deserializer /** Alias for [[Deserializer#identity]]. */ @@ -138,8 +138,8 @@ object GenDeserializer { } override def flatMap[T0 <: KeyOrValue, B]( - f: A => GenDeserializer[T0, F, B] - ): GenDeserializer[T0, F, B] = + f: A => GenericDeserializer[T0, F, B] + ): GenericDeserializer[T0, F, B] = Deserializer.instance { (topic, headers, bytes) => deserialize(topic, headers, bytes).flatMap { a => f(a).deserialize(topic, headers, bytes) @@ -147,7 +147,7 @@ object GenDeserializer { } override def product[T0 <: KeyOrValue, B]( - that: GenDeserializer[T0, F, B] + that: GenericDeserializer[T0, F, B] ): Deserializer[F, (A, B)] = Deserializer.instance { (topic, headers, bytes) => val a = deserialize(topic, headers, bytes) @@ -196,8 +196,8 @@ object GenDeserializer { * from which the serialized bytes came. */ def topic[T <: KeyOrValue, F[_], A]( - f: PartialFunction[String, GenDeserializer[T, F, A]] - )(implicit F: Sync[F]): GenDeserializer[T, F, A] = + f: PartialFunction[String, GenericDeserializer[T, F, A]] + )(implicit F: Sync[F]): GenericDeserializer[T, F, A] = Deserializer.instance { (topic, headers, bytes) => f.applyOrElse(topic, unexpectedTopic) .deserialize(topic, headers, bytes) @@ -238,44 +238,44 @@ object GenDeserializer { implicit def monadError[T <: KeyOrValue, F[_]]( implicit F: Sync[F] - ): MonadError[GenDeserializer[T, F, *], Throwable] = - new MonadError[GenDeserializer[T, F, *], Throwable] { - override def pure[A](a: A): GenDeserializer[T, F, A] = + ): MonadError[GenericDeserializer[T, F, *], Throwable] = + new MonadError[GenericDeserializer[T, F, *], Throwable] { + override def pure[A](a: A): GenericDeserializer[T, F, A] = Deserializer.const(a) override def map[A, B]( - deserializer: GenDeserializer[T, F, A] - )(f: A => B): GenDeserializer[T, F, B] = + deserializer: GenericDeserializer[T, F, A] + )(f: A => B): GenericDeserializer[T, F, B] = deserializer.map(f) override def flatMap[A, B]( - deserializer: GenDeserializer[T, F, A] - )(f: A => GenDeserializer[T, F, B]): GenDeserializer[T, F, B] = + deserializer: GenericDeserializer[T, F, A] + )(f: A => GenericDeserializer[T, F, B]): GenericDeserializer[T, F, B] = deserializer.flatMap(f) override def product[A, B]( - first: GenDeserializer[T, F, A], - second: GenDeserializer[T, F, B] - ): GenDeserializer[T, F, (A, B)] = + first: GenericDeserializer[T, F, A], + second: GenericDeserializer[T, F, B] + ): GenericDeserializer[T, F, (A, B)] = first.product(second) override def tailRecM[A, B]( a: A - )(f: A => GenDeserializer[T, F, Either[A, B]]): GenDeserializer[T, F, B] = + )(f: A => GenericDeserializer[T, F, Either[A, B]]): GenericDeserializer[T, F, B] = Deserializer.instance { (topic, headers, bytes) => F.tailRecM(a)(f(_).deserialize(topic, headers, bytes)) } - override def handleErrorWith[A](fa: GenDeserializer[T, F, A])( - f: Throwable => GenDeserializer[T, F, A] - ): GenDeserializer[T, F, A] = + override def handleErrorWith[A](fa: GenericDeserializer[T, F, A])( + f: Throwable => GenericDeserializer[T, F, A] + ): GenericDeserializer[T, F, A] = Deserializer.instance { (topic, headers, bytes) => 
F.handleErrorWith(fa.deserialize(topic, headers, bytes)) { throwable => f(throwable).deserialize(topic, headers, bytes) } } - override def raiseError[A](e: Throwable): GenDeserializer[T, F, A] = + override def raiseError[A](e: Throwable): GenericDeserializer[T, F, A] = Deserializer.fail(e) } diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 61a23bdc1..922b22425 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -12,7 +12,7 @@ import cats.syntax.all._ import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID -sealed abstract class GenSerializer[-T <: KeyOrValue, F[_], A] { +sealed abstract class GenericSerializer[-T <: KeyOrValue, F[_], A] { /** * Attempts to serialize the specified value of type `A` into @@ -26,19 +26,19 @@ sealed abstract class GenSerializer[-T <: KeyOrValue, F[_], A] { * function `f` on a value of type `B`, and then serializes * the result with this [[Serializer]]. */ - def contramap[B](f: B => A): GenSerializer[T, F, B] + def contramap[B](f: B => A): GenericSerializer[T, F, B] /** * Creates a new [[Serializer]] which applies the specified * function `f` on the output bytes of this [[Serializer]]. */ - def mapBytes(f: Array[Byte] => Array[Byte]): GenSerializer[T, F, A] + def mapBytes(f: Array[Byte] => Array[Byte]): GenericSerializer[T, F, A] /** * Creates a new [[Serializer]] which serializes `Some` values * using this [[Serializer]], and serializes `None` as `null`. */ - def option: GenSerializer[T, F, Option[A]] + def option: GenericSerializer[T, F, Option[A]] /** * Creates a new [[Serializer]] which suspends serialization, @@ -51,7 +51,7 @@ sealed abstract class GenSerializer[-T <: KeyOrValue, F[_], A] { * Functional composable Kafka key- and record serializer with * support for effect types. */ -object GenSerializer { +object GenericSerializer { def apply[F[_], A](implicit serializer: Serializer[F, A]): Serializer[F, A] = serializer @@ -175,8 +175,8 @@ object GenSerializer { * which the bytes are going to be sent. */ def topic[T <: KeyOrValue, F[_], A]( - f: PartialFunction[String, GenSerializer[T, F, A]] - )(implicit F: Sync[F]): GenSerializer[T, F, A] = + f: PartialFunction[String, GenericSerializer[T, F, A]] + )(implicit F: Sync[F]): GenericSerializer[T, F, A] = Serializer.instance[F, A] { (topic, headers, a) => f.applyOrElse(topic, unexpectedTopic) .serialize(topic, headers, a) @@ -210,15 +210,15 @@ object GenSerializer { * serializes `Some` values using the serializer for type `A`. 
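
An illustrative sketch, not part of the patch: the `option` combinator (and the implicit defined just below) derives a serializer for optional values, writing `None` as `null`.

```scala
// Sketch only: an Option serializer derived from the String serializer.
import cats.effect.IO
import fs2.kafka._

val optionalValue: ValueSerializer[IO, Option[String]] = Serializer.string[IO].option
```
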
*/ implicit def option[T <: KeyOrValue, F[_], A]( - implicit serializer: GenSerializer[T, F, A] - ): GenSerializer[T, F, Option[A]] = + implicit serializer: GenericSerializer[T, F, A] + ): GenericSerializer[T, F, Option[A]] = serializer.option - implicit def contravariant[T <: KeyOrValue, F[_]]: Contravariant[GenSerializer[T, F, *]] = - new Contravariant[GenSerializer[T, F, *]] { + implicit def contravariant[T <: KeyOrValue, F[_]]: Contravariant[GenericSerializer[T, F, *]] = + new Contravariant[GenericSerializer[T, F, *]] { override def contramap[A, B]( - serializer: GenSerializer[T, F, A] - )(f: B => A): GenSerializer[T, F, B] = + serializer: GenericSerializer[T, F, A] + )(f: B => A): GenericSerializer[T, F, B] = serializer.contramap(f) } diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index eaabf5f13..b6c6dd627 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -59,15 +59,15 @@ package object kafka { ): Pipe[F, CommittableOffset[F], Unit] = _.groupWithin(n, d).evalMap(CommittableOffsetBatch.fromFoldable(_).commit) - type Serializer[F[_], A] = GenSerializer[KeyOrValue, F, A] - type KeySerializer[F[_], A] = GenSerializer[Key, F, A] - type ValueSerializer[F[_], A] = GenSerializer[Value, F, A] - val Serializer: GenSerializer.type = GenSerializer - - type Deserializer[F[_], A] = GenDeserializer[KeyOrValue, F, A] - type KeyDeserializer[F[_], A] = GenDeserializer[Key, F, A] - type ValueDeserializer[F[_], A] = GenDeserializer[Value, F, A] - val Deserializer: GenDeserializer.type = GenDeserializer + type Serializer[F[_], A] = GenericSerializer[KeyOrValue, F, A] + type KeySerializer[F[_], A] = GenericSerializer[Key, F, A] + type ValueSerializer[F[_], A] = GenericSerializer[Value, F, A] + val Serializer: GenericSerializer.type = GenericSerializer + + type Deserializer[F[_], A] = GenericDeserializer[KeyOrValue, F, A] + type KeyDeserializer[F[_], A] = GenericDeserializer[Key, F, A] + type ValueDeserializer[F[_], A] = GenericDeserializer[Value, F, A] + val Deserializer: GenericDeserializer.type = GenericDeserializer } package kafka { From e535ab6c6169a7419df894e4bb884409f533b735 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 15:24:36 +0000 Subject: [PATCH 070/162] Allocate serdes in resource --- .../main/scala/fs2/kafka/KafkaProducer.scala | 2 +- .../fs2/kafka/KafkaProducerConnection.scala | 4 +- .../scala/fs2/kafka/ProducerSettings.scala | 29 ++++----- .../scala/fs2/kafka/RecordSerializer.scala | 35 ++++------- .../kafka/TransactionalKafkaProducer.scala | 4 +- .../kafka/KafkaProducerConnectionSpec.scala | 2 +- .../fs2/kafka/ProducerSettingsSpec.scala | 4 +- .../fs2/kafka/vulcan/AvroSerializer.scala | 28 +++++---- .../fs2/kafka/vulcan/AvroSerializerSpec.scala | 8 +-- .../scala/fs2/kafka/vulcan/PackageSpec.scala | 60 ++++++++++++------- 10 files changed, 94 insertions(+), 82 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index f9f19f121..77eae127c 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -120,7 +120,7 @@ object KafkaProducer { def resource[F[_], K, V]( settings: ProducerSettings[F, K, V] )(implicit F: Async[F], mk: MkProducer[F]): Resource[F, KafkaProducer.Metrics[F, K, V]] = - 
KafkaProducerConnection.resource(settings).evalMap(_.withSerializersFrom(settings)) + KafkaProducerConnection.resource(settings).flatMap(_.withSerializersFrom(settings)) private[kafka] def from[F[_], K, V]( connection: KafkaProducerConnection[F], diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index 284e68e59..f094af4d6 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -53,7 +53,7 @@ sealed abstract class KafkaProducerConnection[F[_]] { */ def withSerializersFrom[K, V]( settings: ProducerSettings[F, K, V] - ): F[KafkaProducer.Metrics[F, K, V]] + ): Resource[F, KafkaProducer.Metrics[F, K, V]] } object KafkaProducerConnection { @@ -137,7 +137,7 @@ object KafkaProducerConnection { override def withSerializersFrom[K, V]( settings: ProducerSettings[G, K, V] - ): G[KafkaProducer.Metrics[G, K, V]] = + ): Resource[G, KafkaProducer.Metrics[G, K, V]] = (settings.keySerializer, settings.valueSerializer).mapN(withSerializers) } diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index f7844debe..3fe3f14a2 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -6,12 +6,13 @@ package fs2.kafka -import cats.{Applicative, Show} +import cats.Show import fs2.kafka.security.KafkaCredentialStore import org.apache.kafka.clients.producer.ProducerConfig import scala.concurrent.ExecutionContext import scala.concurrent.duration._ +import cats.effect.Resource /** * [[ProducerSettings]] contain settings necessary to create a [[KafkaProducer]]. @@ -31,12 +32,12 @@ sealed abstract class ProducerSettings[F[_], K, V] { /** * The `Serializer` to use for serializing record keys. */ - def keySerializer: F[KeySerializer[F, K]] + def keySerializer: Resource[F, KeySerializer[F, K]] /** * The `Serializer` to use for serializing record values. */ - def valueSerializer: F[ValueSerializer[F, V]] + def valueSerializer: Resource[F, ValueSerializer[F, V]] /** * A custom [[ExecutionContext]] to use for blocking Kafka operations. 
@@ -223,8 +224,8 @@ sealed abstract class ProducerSettings[F[_], K, V] { object ProducerSettings { private[this] final case class ProducerSettingsImpl[F[_], K, V]( - override val keySerializer: F[KeySerializer[F, K]], - override val valueSerializer: F[ValueSerializer[F, V]], + override val keySerializer: Resource[F, KeySerializer[F, K]], + override val valueSerializer: Resource[F, ValueSerializer[F, V]], override val customBlockingContext: Option[ExecutionContext], override val properties: Map[String, String], override val closeTimeout: FiniteDuration @@ -296,8 +297,8 @@ object ProducerSettings { } private[this] def create[F[_], K, V]( - keySerializer: F[KeySerializer[F, K]], - valueSerializer: F[ValueSerializer[F, V]] + keySerializer: Resource[F, KeySerializer[F, K]], + valueSerializer: Resource[F, ValueSerializer[F, V]] ): ProducerSettings[F, K, V] = ProducerSettingsImpl( keySerializer = keySerializer, @@ -312,27 +313,27 @@ object ProducerSettings { def apply[F[_], K, V]( keySerializer: KeySerializer[F, K], valueSerializer: ValueSerializer[F, V] - )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = + ): ProducerSettings[F, K, V] = create( - keySerializer = F.pure(keySerializer), - valueSerializer = F.pure(valueSerializer) + keySerializer = Resource.pure(keySerializer), + valueSerializer = Resource.pure(valueSerializer) ) def apply[F[_], K, V]( keySerializer: RecordSerializer[F, K], valueSerializer: ValueSerializer[F, V] - )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = + ): ProducerSettings[F, K, V] = create( keySerializer = keySerializer.forKey, - valueSerializer = F.pure(valueSerializer) + valueSerializer = Resource.pure(valueSerializer) ) def apply[F[_], K, V]( keySerializer: KeySerializer[F, K], valueSerializer: RecordSerializer[F, V] - )(implicit F: Applicative[F]): ProducerSettings[F, K, V] = + ): ProducerSettings[F, K, V] = create( - keySerializer = F.pure(keySerializer), + keySerializer = Resource.pure(keySerializer), valueSerializer = valueSerializer.forValue ) diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala index e961132cd..12d4c5121 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala @@ -6,8 +6,7 @@ package fs2.kafka -import cats._ -import cats.syntax.all._ +import cats.effect.Resource /** * Serializer which may vary depending on whether a record @@ -15,9 +14,9 @@ import cats.syntax.all._ * a creation effect. 
*/ sealed abstract class RecordSerializer[F[_], A] { - def forKey: F[KeySerializer[F, A]] + def forKey: Resource[F, KeySerializer[F, A]] - def forValue: F[ValueSerializer[F, A]] + def forValue: Resource[F, ValueSerializer[F, A]] } object RecordSerializer { @@ -26,26 +25,26 @@ object RecordSerializer { ): RecordSerializer[F, A] = serializer - def const[F[_]: Functor, A]( - serializer: => F[Serializer[F, A]] + def const[F[_], A]( + serializer: => Resource[F, Serializer[F, A]] ): RecordSerializer[F, A] = RecordSerializer.instance( - forKey = serializer.widen, - forValue = serializer.widen + forKey = serializer, + forValue = serializer ) def instance[F[_], A]( - forKey: => F[KeySerializer[F, A]], - forValue: => F[ValueSerializer[F, A]] + forKey: => Resource[F, KeySerializer[F, A]], + forValue: => Resource[F, ValueSerializer[F, A]] ): RecordSerializer[F, A] = { def _forKey = forKey def _forValue = forValue new RecordSerializer[F, A] { - override def forKey: F[KeySerializer[F, A]] = + override def forKey: Resource[F, KeySerializer[F, A]] = _forKey - override def forValue: F[ValueSerializer[F, A]] = + override def forValue: Resource[F, ValueSerializer[F, A]] = _forValue override def toString: String = @@ -53,14 +52,6 @@ object RecordSerializer { } } - def lift[F[_], A](serializer: => Serializer[F, A])( - implicit F: Applicative[F] - ): RecordSerializer[F, A] = - RecordSerializer.const(F.pure(serializer)) - - implicit def lift[F[_], A]( - implicit F: Applicative[F], - serializer: Serializer[F, A] - ): RecordSerializer[F, A] = - RecordSerializer.lift(serializer) + implicit def lift[F[_], A](implicit serializer: => Serializer[F, A]): RecordSerializer[F, A] = + RecordSerializer.const(Resource.pure(serializer)) } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index c6c705445..8a0147123 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -92,8 +92,8 @@ object TransactionalKafkaProducer { mk: MkProducer[F] ): Resource[F, TransactionalKafkaProducer.WithoutOffsets[F, K, V]] = ( - Resource.eval(settings.producerSettings.keySerializer), - Resource.eval(settings.producerSettings.valueSerializer), + settings.producerSettings.keySerializer, + settings.producerSettings.valueSerializer, WithTransactionalProducer(mk, settings) ).mapN { (keySerializer, valueSerializer, withProducer) => new TransactionalKafkaProducer.WithoutOffsets[F, K, V] { diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala index dc33490f3..fe22dadc3 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala @@ -17,7 +17,7 @@ final class KafkaProducerConnectionSpec extends BaseKafkaSpec with TypeCheckedTr (for { settings <- Stream(producerSettings[IO]) producerConnection <- KafkaProducerConnection.stream(settings) - producer1 <- Stream.eval(producerConnection.withSerializersFrom(settings)) + producer1 <- Stream.resource(producerConnection.withSerializersFrom(settings)) serializer2 = Serializer.string[IO].contramap[Int](_.toString) producer2 = producerConnection.withSerializers(serializer2, serializer2) result1 <- Stream.eval( diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala 
b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala index 2eca0e1c3..0ac3a93af 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala @@ -157,8 +157,8 @@ final class ProducerSettingsSpec extends BaseSpec { RecordSerializer.lift(serializerInstance) ProducerSettings[IO, Int, Int] - ProducerSettings[IO, String, Int].keySerializer.unsafeRunSync() shouldBe serializerInstance - ProducerSettings[IO, Int, String].valueSerializer.unsafeRunSync() shouldBe serializerInstance + ProducerSettings[IO, String, Int].keySerializer.use(IO.pure).unsafeRunSync() shouldBe serializerInstance + ProducerSettings[IO, Int, String].valueSerializer.use(IO.pure).unsafeRunSync() shouldBe serializerInstance ProducerSettings[IO, String, String] } diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index e0458e98d..fa00900a8 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -8,8 +8,8 @@ package fs2.kafka.vulcan import _root_.vulcan.Codec import cats.effect.Sync -import cats.syntax.all._ import fs2.kafka.{RecordSerializer, Serializer} +import cats.effect.kernel.Resource final class AvroSerializer[A] private[vulcan] ( private val codec: Codec[A] @@ -17,22 +17,24 @@ final class AvroSerializer[A] private[vulcan] ( def using[F[_]]( settings: AvroSettings[F] )(implicit F: Sync[F]): RecordSerializer[F, A] = { - val createSerializer: Boolean => F[Serializer[F, A]] = - settings.createAvroSerializer(_).map { - case (serializer, _) => - Serializer.instance { (topic, _, a) => - F.defer { - codec.encode(a) match { - case Right(value) => F.pure(serializer.serialize(topic, value)) - case Left(error) => F.raiseError(error.throwable) + def createSerializer(isKey: Boolean) : Resource[F, Serializer[F, A]] = + Resource + .make(settings.createAvroSerializer(isKey)) { case (ser, _) => F.delay(ser.close()) } + .map { + case (serializer, _) => + Serializer.instance { (topic, _, a) => + F.defer { + codec.encode(a) match { + case Right(value) => F.pure(serializer.serialize(topic, value)) + case Left(error) => F.raiseError(error.throwable) + } } } - } - } + } RecordSerializer.instance( - forKey = createSerializer(true).widen, - forValue = createSerializer(false).widen + forKey = createSerializer(true), + forValue = createSerializer(false) ) } diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala index ca91ed7a0..deb74ee7f 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala @@ -12,8 +12,8 @@ final class AvroSerializerSpec extends AnyFunSpec { val serializer = AvroSerializer[Int].using(avroSettings) - assert(serializer.forKey.attempt.unsafeRunSync().isRight) - assert(serializer.forValue.attempt.unsafeRunSync().isRight) + assert(serializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(serializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("raises schema errors") { @@ -27,8 +27,8 @@ final class AvroSerializerSpec extends AnyFunSpec { val serializer = avroSerializer(codec).using(avroSettings) - assert(serializer.forKey.attempt.unsafeRunSync().isRight) - 
assert(serializer.forValue.attempt.unsafeRunSync().isRight) + assert(serializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(serializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("toString") { diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index a790a18d6..3aee2f6ca 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -26,33 +26,51 @@ final class PackageSpec extends AnyFunSpec { describe("avroSerializer/avroDeserializer") { it("should be able to do roundtrip serialization") { - (for { - serializer <- avroSerializer[Test].using(avroSettings).forValue - test = Test("test") - serialized <- serializer.serialize("topic", Headers.empty, test) - deserializer <- avroDeserializer[Test].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic", Headers.empty, serialized) - } yield assert(deserialized == test)).unsafeRunSync() + avroSerializer[Test] + .using(avroSettings) + .forValue + .use { serializer => + val test = Test("test") + + for { + serialized <- serializer.serialize("topic", Headers.empty, test) + deserializer <- avroDeserializer[Test].using(avroSettings).forValue + deserialized <- deserializer.deserialize("topic", Headers.empty, serialized) + } yield assert(deserialized == test) + } + .unsafeRunSync() } it("should be able to do roundtrip serialization using compatible schemas") { - (for { - serializer <- avroSerializer[Test2].using(avroSettings).forValue - test2 = Test2("test", 42) - serialized <- serializer.serialize("topic2", Headers.empty, test2) - deserializer <- avroDeserializer[Test].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic2", Headers.empty, serialized) - } yield assert(deserialized == Test("test"))).unsafeRunSync() + avroSerializer[Test2] + .using(avroSettings) + .forValue + .use { serializer => + val test2 = Test2("test", 42) + for { + + serialized <- serializer.serialize("topic2", Headers.empty, test2) + deserializer <- avroDeserializer[Test].using(avroSettings).forValue + deserialized <- deserializer.deserialize("topic2", Headers.empty, serialized) + } yield assert(deserialized == Test("test")) + } + .unsafeRunSync() } it("should error when reader and writer schemas have mismatching logical types") { - (for { - serializer <- avroSerializer[Long].using(avroSettings).forValue - rawLong = 42L - serialized <- serializer.serialize("topic3", Headers.empty, rawLong) - deserializer <- avroDeserializer[Instant].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic3", Headers.empty, serialized).attempt - } yield assert(deserialized.isLeft)).unsafeRunSync() + avroSerializer[Long] + .using(avroSettings) + .forValue + .use { serializer => + val rawLong = 42L + + for { + serialized <- serializer.serialize("topic3", Headers.empty, rawLong) + deserializer <- avroDeserializer[Instant].using(avroSettings).forValue + deserialized <- deserializer.deserialize("topic3", Headers.empty, serialized).attempt + } yield assert(deserialized.isLeft) + } + .unsafeRunSync() } } From 6209e72704ffa44afd88f0d69bbd33cded928222 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 14 Mar 2022 15:33:30 +0000 Subject: [PATCH 071/162] Allocate deserializers in Resource --- .../scala/fs2/kafka/ConsumerSettings.scala | 29 ++++---- .../main/scala/fs2/kafka/KafkaConsumer.scala | 4 +- 
.../scala/fs2/kafka/RecordDeserializer.scala | 37 ++++------ .../fs2/kafka/ConsumerSettingsSpec.scala | 2 + .../fs2/kafka/ProducerSettingsSpec.scala | 8 ++- .../fs2/kafka/vulcan/AvroDeserializer.scala | 50 +++++++------- .../fs2/kafka/vulcan/AvroSerializer.scala | 2 +- .../kafka/vulcan/AvroDeserializerSpec.scala | 8 +-- .../scala/fs2/kafka/vulcan/PackageSpec.scala | 69 ++++++++++--------- 9 files changed, 107 insertions(+), 102 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 6c7b52552..820f71d7a 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -6,7 +6,8 @@ package fs2.kafka -import cats.{Applicative, Show} +import cats.Show +import cats.effect.Resource import fs2.kafka.security.KafkaCredentialStore import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.common.requests.OffsetFetchResponse @@ -40,12 +41,12 @@ sealed abstract class ConsumerSettings[F[_], K, V] { /** * The `Deserializer` to use for deserializing record keys. */ - def keyDeserializer: F[KeyDeserializer[F, K]] + def keyDeserializer: Resource[F, KeyDeserializer[F, K]] /** * The `Deserializer` to use for deserializing record values. */ - def valueDeserializer: F[ValueDeserializer[F, V]] + def valueDeserializer: Resource[F, ValueDeserializer[F, V]] /** * A custom `ExecutionContext` to use for blocking Kafka operations. If not @@ -395,8 +396,8 @@ sealed abstract class ConsumerSettings[F[_], K, V] { object ConsumerSettings { private[this] final case class ConsumerSettingsImpl[F[_], K, V]( - override val keyDeserializer: F[KeyDeserializer[F, K]], - override val valueDeserializer: F[ValueDeserializer[F, V]], + override val keyDeserializer: Resource[F, KeyDeserializer[F, K]], + override val valueDeserializer: Resource[F, ValueDeserializer[F, V]], override val customBlockingContext: Option[ExecutionContext], override val properties: Map[String, String], override val closeTimeout: FiniteDuration, @@ -530,8 +531,8 @@ object ConsumerSettings { } private[this] def create[F[_], K, V]( - keyDeserializer: F[KeyDeserializer[F, K]], - valueDeserializer: F[ValueDeserializer[F, V]] + keyDeserializer: Resource[F, KeyDeserializer[F, K]], + valueDeserializer: Resource[F, ValueDeserializer[F, V]] ): ConsumerSettings[F, K, V] = ConsumerSettingsImpl( customBlockingContext = None, @@ -553,27 +554,27 @@ object ConsumerSettings { def apply[F[_], K, V]( keyDeserializer: KeyDeserializer[F, K], valueDeserializer: ValueDeserializer[F, V] - )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = + ): ConsumerSettings[F, K, V] = create( - keyDeserializer = F.pure(keyDeserializer), - valueDeserializer = F.pure(valueDeserializer) + keyDeserializer = Resource.pure(keyDeserializer), + valueDeserializer = Resource.pure(valueDeserializer) ) def apply[F[_], K, V]( keyDeserializer: RecordDeserializer[F, K], valueDeserializer: ValueDeserializer[F, V] - )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = + ): ConsumerSettings[F, K, V] = create( keyDeserializer = keyDeserializer.forKey, - valueDeserializer = F.pure(valueDeserializer) + valueDeserializer = Resource.pure(valueDeserializer) ) def apply[F[_], K, V]( keyDeserializer: KeyDeserializer[F, K], valueDeserializer: RecordDeserializer[F, V] - )(implicit F: Applicative[F]): ConsumerSettings[F, K, V] = + ): ConsumerSettings[F, K, V] = create( - keyDeserializer = F.pure(keyDeserializer), + 
keyDeserializer = Resource.pure(keyDeserializer), valueDeserializer = valueDeserializer.forValue ) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index 9b5869249..ce0406660 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -618,8 +618,8 @@ object KafkaConsumer { mk: MkConsumer[F] ): Resource[F, KafkaConsumer[F, K, V]] = for { - keyDeserializer <- Resource.eval(settings.keyDeserializer) - valueDeserializer <- Resource.eval(settings.valueDeserializer) + keyDeserializer <- settings.keyDeserializer + valueDeserializer <- settings.valueDeserializer id <- Resource.eval(F.delay(new Object().hashCode)) jitter <- Resource.eval(Jitter.default[F]) logging <- Resource.eval(Logging.default[F](id)) diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala index 652194947..3478c4009 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala @@ -6,8 +6,7 @@ package fs2.kafka -import cats.syntax.all._ -import cats.{Applicative, Functor} +import cats.effect.Resource /** * Deserializer which may vary depending on whether a record @@ -15,16 +14,16 @@ import cats.{Applicative, Functor} * a creation effect. */ sealed abstract class RecordDeserializer[F[_], A] { - def forKey: F[KeyDeserializer[F, A]] + def forKey: Resource[F, KeyDeserializer[F, A]] - def forValue: F[ValueDeserializer[F, A]] + def forValue: Resource[F, ValueDeserializer[F, A]] /** * Returns a new [[RecordDeserializer]] instance that will catch deserialization * errors and return them as a value, allowing user code to handle them without * causing the consumer to fail. 
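
An illustrative sketch, not part of the patch: with deserializers now allocated in `Resource`, consumer settings are built the same way as before, and `attempt` (defined just below) turns per-record decode failures into `Either` values instead of failing the consumer. The bootstrap server and group id are placeholders.

```scala
// Sketch only: settings whose value deserializer surfaces errors as Either values.
import cats.effect.IO
import fs2.kafka._

def attemptSettings(
  key: KeyDeserializer[IO, String],
  value: RecordDeserializer[IO, String]
): ConsumerSettings[IO, String, Either[Throwable, String]] =
  ConsumerSettings(key, value.attempt)
    .withBootstrapServers("localhost:9092")
    .withGroupId("example-group")
```
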
*/ - final def attempt(implicit F: Functor[F]): RecordDeserializer[F, Either[Throwable, A]] = + final def attempt: RecordDeserializer[F, Either[Throwable, A]] = RecordDeserializer.instance(forKey.map(_.attempt), forValue.map(_.attempt)) } @@ -34,26 +33,26 @@ object RecordDeserializer { ): RecordDeserializer[F, A] = deserializer - def const[F[_]: Functor, A]( - deserializer: => F[Deserializer[F, A]] + def const[F[_], A]( + deserializer: => Resource[F, Deserializer[F, A]] ): RecordDeserializer[F, A] = RecordDeserializer.instance( - forKey = deserializer.widen, - forValue = deserializer.widen + forKey = deserializer, + forValue = deserializer ) def instance[F[_], A]( - forKey: => F[KeyDeserializer[F, A]], - forValue: => F[ValueDeserializer[F, A]] + forKey: => Resource[F, KeyDeserializer[F, A]], + forValue: => Resource[F, ValueDeserializer[F, A]] ): RecordDeserializer[F, A] = { def _forKey = forKey def _forValue = forValue new RecordDeserializer[F, A] { - override def forKey: F[KeyDeserializer[F, A]] = + override def forKey: Resource[F, KeyDeserializer[F, A]] = _forKey - override def forValue: F[ValueDeserializer[F, A]] = + override def forValue: Resource[F, ValueDeserializer[F, A]] = _forValue override def toString: String = @@ -61,14 +60,6 @@ object RecordDeserializer { } } - def lift[F[_], A](deserializer: => Deserializer[F, A])( - implicit F: Applicative[F] - ): RecordDeserializer[F, A] = - RecordDeserializer.const(F.pure(deserializer)) - - implicit def lift[F[_], A]( - implicit F: Applicative[F], - deserializer: Deserializer[F, A] - ): RecordDeserializer[F, A] = - RecordDeserializer.lift(deserializer) + implicit def lift[F[_], A](implicit deserializer: Deserializer[F, A]): RecordDeserializer[F, A] = + RecordDeserializer.const(Resource.pure(deserializer)) } diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala index cb5e96107..55d7ffc22 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala @@ -285,8 +285,10 @@ final class ConsumerSettingsSpec extends BaseSpec { ConsumerSettings[IO, Int, Int] ConsumerSettings[IO, String, Int].keyDeserializer + .use(IO.pure) .unsafeRunSync() shouldBe deserializerInstance ConsumerSettings[IO, Int, String].valueDeserializer + .use(IO.pure) .unsafeRunSync() shouldBe deserializerInstance ConsumerSettings[IO, String, String] } diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala index 0ac3a93af..972049b90 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala @@ -157,8 +157,12 @@ final class ProducerSettingsSpec extends BaseSpec { RecordSerializer.lift(serializerInstance) ProducerSettings[IO, Int, Int] - ProducerSettings[IO, String, Int].keySerializer.use(IO.pure).unsafeRunSync() shouldBe serializerInstance - ProducerSettings[IO, Int, String].valueSerializer.use(IO.pure).unsafeRunSync() shouldBe serializerInstance + ProducerSettings[IO, String, Int].keySerializer + .use(IO.pure) + .unsafeRunSync() shouldBe serializerInstance + ProducerSettings[IO, Int, String].valueSerializer + .use(IO.pure) + .unsafeRunSync() shouldBe serializerInstance ProducerSettings[IO, String, String] } diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala 
b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index 6405c077e..764957c00 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -8,7 +8,7 @@ package fs2.kafka.vulcan import _root_.vulcan.Codec import cats.effect.Sync -import cats.syntax.all._ +import cats.effect.Resource import fs2.kafka.{Deserializer, RecordDeserializer} import io.confluent.kafka.schemaregistry.avro.AvroSchema import java.nio.ByteBuffer @@ -21,38 +21,42 @@ final class AvroDeserializer[A] private[vulcan] ( )(implicit F: Sync[F]): RecordDeserializer[F, A] = codec.schema match { case Right(schema) => - val createDeserializer: Boolean => F[Deserializer[F, A]] = - settings.createAvroDeserializer(_).map { - case (deserializer, schemaRegistryClient) => - Deserializer.instance { (topic, _, bytes) => - F.defer { - val writerSchemaId = - ByteBuffer.wrap(bytes).getInt(1) // skip magic byte + def createDeserializer(isKey: Boolean): Resource[F, Deserializer[F, A]] = + Resource + .make(settings.createAvroDeserializer(isKey)) { + case (deserializer, _) => F.delay(deserializer.close()) + } + .map { + case (deserializer, schemaRegistryClient) => + Deserializer.instance { (topic, _, bytes) => + F.defer { + val writerSchemaId = + ByteBuffer.wrap(bytes).getInt(1) // skip magic byte - val writerSchema = { - val schema = schemaRegistryClient.getSchemaById(writerSchemaId) - if (schema.isInstanceOf[AvroSchema]) - schema.asInstanceOf[AvroSchema].rawSchema() - else - null - } + val writerSchema = { + val schema = schemaRegistryClient.getSchemaById(writerSchemaId) + if (schema.isInstanceOf[AvroSchema]) + schema.asInstanceOf[AvroSchema].rawSchema() + else + null + } - codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { - case Right(a) => F.pure(a) - case Left(error) => F.raiseError(error.throwable) + codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { + case Right(a) => F.pure(a) + case Left(error) => F.raiseError(error.throwable) + } } } - } - } + } RecordDeserializer.instance( - forKey = createDeserializer(true).widen, - forValue = createDeserializer(false).widen + forKey = createDeserializer(true), + forValue = createDeserializer(false) ) case Left(error) => RecordDeserializer.const { - F.raiseError(error.throwable) + Resource.raiseError(error.throwable) } } diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index fa00900a8..fa575d05e 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -17,7 +17,7 @@ final class AvroSerializer[A] private[vulcan] ( def using[F[_]]( settings: AvroSettings[F] )(implicit F: Sync[F]): RecordSerializer[F, A] = { - def createSerializer(isKey: Boolean) : Resource[F, Serializer[F, A]] = + def createSerializer(isKey: Boolean): Resource[F, Serializer[F, A]] = Resource .make(settings.createAvroSerializer(isKey)) { case (ser, _) => F.delay(ser.close()) } .map { diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index fccbb9fd6..3d6d9780f 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -12,8 +12,8 @@ 
final class AvroDeserializerSpec extends AnyFunSpec { val deserializer = AvroDeserializer[Int].using(avroSettings) - assert(deserializer.forKey.attempt.unsafeRunSync().isRight) - assert(deserializer.forValue.attempt.unsafeRunSync().isRight) + assert(deserializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(deserializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("raises schema errors") { @@ -27,8 +27,8 @@ final class AvroDeserializerSpec extends AnyFunSpec { val deserializer = avroDeserializer(codec).using(avroSettings) - assert(deserializer.forKey.attempt.unsafeRunSync().isLeft) - assert(deserializer.forValue.attempt.unsafeRunSync().isLeft) + assert(deserializer.forKey.use(IO.pure).attempt.unsafeRunSync().isLeft) + assert(deserializer.forValue.use(IO.pure).attempt.unsafeRunSync().isLeft) } it("toString") { diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index 3aee2f6ca..e44f098c9 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -26,49 +26,52 @@ final class PackageSpec extends AnyFunSpec { describe("avroSerializer/avroDeserializer") { it("should be able to do roundtrip serialization") { - avroSerializer[Test] - .using(avroSettings) - .forValue - .use { serializer => - val test = Test("test") - - for { - serialized <- serializer.serialize("topic", Headers.empty, test) - deserializer <- avroDeserializer[Test].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic", Headers.empty, serialized) - } yield assert(deserialized == test) + ( + avroSerializer[Test].using(avroSettings).forValue, + avroDeserializer[Test].using(avroSettings).forValue + ).parTupled + .use { + case (serializer, deserializer) => + val test = Test("test") + + for { + serialized <- serializer.serialize("topic", Headers.empty, test) + deserialized <- deserializer.deserialize("topic", Headers.empty, serialized) + } yield assert(deserialized == test) } .unsafeRunSync() } it("should be able to do roundtrip serialization using compatible schemas") { - avroSerializer[Test2] - .using(avroSettings) - .forValue - .use { serializer => - val test2 = Test2("test", 42) - for { - - serialized <- serializer.serialize("topic2", Headers.empty, test2) - deserializer <- avroDeserializer[Test].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic2", Headers.empty, serialized) - } yield assert(deserialized == Test("test")) + ( + avroSerializer[Test2].using(avroSettings).forValue, + avroDeserializer[Test].using(avroSettings).forValue + ).parTupled + .use { + case (serializer, deserializer) => + val test2 = Test2("test", 42) + for { + + serialized <- serializer.serialize("topic2", Headers.empty, test2) + deserialized <- deserializer.deserialize("topic2", Headers.empty, serialized) + } yield assert(deserialized == Test("test")) } .unsafeRunSync() } it("should error when reader and writer schemas have mismatching logical types") { - avroSerializer[Long] - .using(avroSettings) - .forValue - .use { serializer => - val rawLong = 42L - - for { - serialized <- serializer.serialize("topic3", Headers.empty, rawLong) - deserializer <- avroDeserializer[Instant].using(avroSettings).forValue - deserialized <- deserializer.deserialize("topic3", Headers.empty, serialized).attempt - } yield assert(deserialized.isLeft) + ( + avroSerializer[Long].using(avroSettings).forValue, + 
avroDeserializer[Instant].using(avroSettings).forValue + ).parTupled + .use { + case (serializer, deserializer) => + val rawLong = 42L + + for { + serialized <- serializer.serialize("topic3", Headers.empty, rawLong) + deserialized <- deserializer.deserialize("topic3", Headers.empty, serialized).attempt + } yield assert(deserialized.isLeft) } .unsafeRunSync() } From daa297bfda96a512b881da68da5324eececac89d Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 21 Mar 2022 10:05:49 +0000 Subject: [PATCH 072/162] Fix merge --- .../src/main/scala/fs2/kafka/ConsumerSettings.scala | 8 ++++---- .../src/main/scala/fs2/kafka/ProducerSettings.scala | 13 +++++++------ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 4dcb0668f..9f1b46516 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -52,8 +52,8 @@ sealed abstract class ConsumerSettings[F[_], K, V] { * Note that this will remove any custom `recordMetadata` configuration. **/ def withDeserializers[K0, V0]( - keyDeserializer: F[Deserializer[F, K0]], - valueDeserializer: F[Deserializer[F, V0]] + keyDeserializer: F[KeyDeserializer[F, K0]], + valueDeserializer: F[ValueDeserializer[F, V0]] ): ConsumerSettings[F, K0, V0] /** @@ -540,8 +540,8 @@ object ConsumerSettings { s"ConsumerSettings(closeTimeout = $closeTimeout, commitTimeout = $commitTimeout, pollInterval = $pollInterval, pollTimeout = $pollTimeout, commitRecovery = $commitRecovery)" override def withDeserializers[K0, V0]( - keyDeserializer: F[Deserializer[F, K0]], - valueDeserializer: F[Deserializer[F, V0]] + keyDeserializer: F[KeyDeserializer[F, K0]], + valueDeserializer: F[ValueDeserializer[F, V0]] ): ConsumerSettings[F, K0, V0] = copy( keyDeserializer = keyDeserializer, diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index 62fe9089a..d5c4498fa 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -8,6 +8,7 @@ package fs2.kafka import cats.effect.Sync import cats.{Applicative, Show} +import cats.syntax.all._ import fs2.kafka.security.KafkaCredentialStore import org.apache.kafka.clients.producer.ProducerConfig @@ -43,8 +44,8 @@ sealed abstract class ProducerSettings[F[_], K, V] { * Replace the serializers with those provided in the arguments. 
*/ def withSerializers[K1, V1]( - keySerializer: F[Serializer[F, K1]], - valueSerializer: F[Serializer[F, V1]] + keySerializer: F[KeySerializer[F, K1]], + valueSerializer: F[ValueSerializer[F, V1]] ): ProducerSettings[F, K1, V1] /** @@ -304,8 +305,8 @@ object ProducerSettings { s"ProducerSettings(closeTimeout = $closeTimeout)" override def withSerializers[K1, V1]( - keySerializer: F[Serializer[F, K1]], - valueSerializer: F[Serializer[F, V1]] + keySerializer: F[KeySerializer[F, K1]], + valueSerializer: F[ValueSerializer[F, V1]] ): ProducerSettings[F, K1, V1] = copy(keySerializer = keySerializer, valueSerializer = valueSerializer) } @@ -365,8 +366,8 @@ object ProducerSettings { def nothing[F[_]](implicit F: Sync[F]): ProducerSettings[F, Nothing, Nothing] = { val nothingSerializer = F.pure(Serializer.fail[F, Nothing](new AssertionError("impossible"))) create[F, Nothing, Nothing]( - keySerializer = nothingSerializer, - valueSerializer = nothingSerializer + keySerializer = nothingSerializer.widen, + valueSerializer = nothingSerializer.widen ) } From 69cf025f353cc66eb8107b6831f977af67eabb2f Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 28 Mar 2022 10:14:17 +0200 Subject: [PATCH 073/162] Update sbt-mdoc to 2.3.2 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 95778ca57..eff2f8b66 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,6 +3,6 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.13.0") From 1b30f3e18a65f548ed90387f5519c3c926c518b0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 28 Mar 2022 10:14:35 +0200 Subject: [PATCH 074/162] Update cats-effect-laws, ... 
to 3.3.9 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d119e82c0..95e9b90df 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.3.7" +val catsEffectVersion = "3.3.9" val catsVersion = "2.6.1" From 5ca8725c646e45f82a7a5dc9a97ed2f78e15af63 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 11 Apr 2022 10:15:55 +0200 Subject: [PATCH 075/162] Update vulcan to 1.8.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index be422c7d5..67e6d4cba 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,7 @@ val kafkaVersion = "3.1.0" val testcontainersScalaVersion = "0.40.4" -val vulcanVersion = "1.8.0" +val vulcanVersion = "1.8.3" val munitVersion = "0.7.29" From 2f539c87d6b18076f5dc9296af5e187932751002 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 11 Apr 2022 10:16:11 +0200 Subject: [PATCH 076/162] Update sbt-mima-plugin to 1.1.0 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 3256f07a2..f518c4f17 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") From cbc9c9b2c44d3f2d0c04d477af2b4c575216926d Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 15:45:38 +0200 Subject: [PATCH 077/162] Update testcontainers-scala-kafka, ... 
to 0.40.6 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index be422c7d5..c60bb0ad6 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val fs2Version = "3.2.5" val kafkaVersion = "3.1.0" -val testcontainersScalaVersion = "0.40.4" +val testcontainersScalaVersion = "0.40.6" val vulcanVersion = "1.8.0" From 87a2d755a9a2a1a8e7ff566ed84975471d05bf2c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 15:45:57 +0200 Subject: [PATCH 078/162] Update sbt-header to 5.7.0 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 3256f07a2..de97e8c89 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.14.2") From fd996733f9a92f88bf2bfc6a8afe16e42c8b342c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 15:46:16 +0200 Subject: [PATCH 079/162] Update kafka-avro-serializer to 7.0.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index be422c7d5..cdeb79638 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.9" val catsVersion = "2.6.1" -val confluentVersion = "7.0.1" +val confluentVersion = "7.0.3" val fs2Version = "3.2.5" From 3b5d912bbdfc6c5e561a807f44a59a0ec93e283b Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 15:46:48 +0200 Subject: [PATCH 080/162] Update scala3-library to 3.1.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index be422c7d5..0c719d751 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ val munitVersion = "0.7.29" val scala2 = "2.13.8" -val scala3 = "3.1.1" +val scala3 = "3.1.2" lazy val `fs2-kafka` = project .in(file(".")) From fc06e3cb17604bc54f9c79ce1af48fc9447c9b59 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 15:47:28 +0200 Subject: [PATCH 081/162] Regenerate workflow with sbt-github-actions --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 32021e1c3..7a7b2b061 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.1.1] + scala: [2.13.8, 3.1.2] java: [temurin@8, temurin@17] runs-on: ${{ matrix.os }} steps: From 322109d3120767676ff544d8a8c5517f84abfec2 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 18:43:05 +0200 Subject: [PATCH 082/162] Revert commit(s) 2f539c87 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index f518c4f17..3256f07a2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") 
addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") From ada45233b2a57156397c89f7440f0c6ed305094c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 25 Apr 2022 18:43:10 +0200 Subject: [PATCH 083/162] Update sbt-mima-plugin to 1.1.0 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index de97e8c89..a393ead11 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") From b93f395e9c9c9ff5dd29d33d0ec70d4b6fd1274a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 9 May 2022 18:20:50 +0200 Subject: [PATCH 084/162] Update kafka-avro-serializer to 7.1.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f5a24ef09..dcba9a245 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.9" val catsVersion = "2.6.1" -val confluentVersion = "7.0.3" +val confluentVersion = "7.1.1" val fs2Version = "3.2.5" From 383849ed51242548a8c7895a2e40d60182dbb061 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 23 May 2022 18:22:38 +0200 Subject: [PATCH 085/162] Update kafka-clients to 3.1.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index dcba9a245..8a2100cf7 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.1.1" val fs2Version = "3.2.5" -val kafkaVersion = "3.1.0" +val kafkaVersion = "3.1.1" val testcontainersScalaVersion = "0.40.6" From a0ee3047e4c2c583cb35fd6ebcfc7a97451e5ba4 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 5 Jul 2022 22:56:49 +0000 Subject: [PATCH 086/162] Update kafka-avro-serializer to 7.1.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index dcba9a245..c0f6412f1 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.9" val catsVersion = "2.6.1" -val confluentVersion = "7.1.1" +val confluentVersion = "7.1.2" val fs2Version = "3.2.5" From a4458c5880e93be0054293bac93e36e9853316f2 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 5 Jul 2022 22:57:02 +0000 Subject: [PATCH 087/162] Update scala3-library to 3.1.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index dcba9a245..6eff095af 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ val munitVersion = "0.7.29" val scala2 = "2.13.8" -val scala3 = "3.1.2" +val scala3 = "3.1.3" lazy val `fs2-kafka` = project .in(file(".")) From 3672c5b3212409dc07d119cfecaa663f02a493b9 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 5 Jul 2022 22:57:56 +0000 Subject: [PATCH 088/162] Regenerate workflow 
with sbt-github-actions --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7a7b2b061..d3bb0bd72 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.1.2] + scala: [2.13.8, 3.1.3] java: [temurin@8, temurin@17] runs-on: ${{ matrix.os }} steps: From 4e9185bf98c65a192c55d9a5e46b657fa202096e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 5 Jul 2022 22:58:08 +0000 Subject: [PATCH 089/162] Update cats-effect-laws, ... to 3.3.13 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index dcba9a245..54dc164ad 100644 --- a/build.sbt +++ b/build.sbt @@ -1,4 +1,4 @@ -val catsEffectVersion = "3.3.9" +val catsEffectVersion = "3.3.13" val catsVersion = "2.6.1" From 88e2a65dbf1a21c8641f890235bfa6eaa9a0c5a6 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 13 Jul 2022 14:10:46 +0100 Subject: [PATCH 090/162] Ignore cats-effect/fs2 updates in series/3.x, pin kafka-client to 3 --- .scala-steward.conf | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/.scala-steward.conf b/.scala-steward.conf index 13b5080e4..bb1f0ed6d 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -1,11 +1,21 @@ pullRequests.frequency = "14 days" -updates.pin = [{ +updates.ignore = [{ + groupId = "org.typelevel", + artifactId="cats-effect" +},{ + groupId = "org.typelevel", + artifactId="cats-effect-laws" +},{ groupId = "org.typelevel", - artifactId="cats-effect", - version = "2." -}, { + artifactId="cats-effect-testkit" +},{ groupId = "co.fs2", - artifactId="fs2-core", - version = "2." + artifactId="fs2-core" }] + +updates.pin = [{ + groupId="org.apache.kafka", + artifactId="kafka-clients", + version="3." 
+}] \ No newline at end of file From e377d9f3a0b88e28c13620ed3e9d299147736be3 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 13 Jul 2022 14:13:27 +0100 Subject: [PATCH 091/162] Ignore testcontainers updates in 3.x --- .scala-steward.conf | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.scala-steward.conf b/.scala-steward.conf index bb1f0ed6d..bb5164c92 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -12,6 +12,8 @@ updates.ignore = [{ },{ groupId = "co.fs2", artifactId="fs2-core" +},{ + groupId = "com.dimafeng" }] updates.pin = [{ From df65c8886e28267953425034a54d4f7d6a5e3d70 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 15 Jul 2022 11:14:34 +0100 Subject: [PATCH 092/162] Merge - WIP --- .github/workflows/ci.yml | 75 ++++++++-- build.sbt | 86 +++-------- .../kafka/CommittableProducerRecords.scala | 38 +++-- .../scala/fs2/kafka/ConsumerSettings.scala | 12 -- .../main/scala/fs2/kafka/Deserializer.scala | 11 +- .../scala/fs2/kafka/KafkaAdminClient.scala | 134 +++++++++--------- .../main/scala/fs2/kafka/KafkaConsumer.scala | 58 ++------ .../main/scala/fs2/kafka/KafkaProducer.scala | 4 +- .../fs2/kafka/KafkaProducerConnection.scala | 4 +- .../scala/fs2/kafka/ProducerSettings.scala | 14 -- .../kafka/TransactionalKafkaProducer.scala | 4 +- .../kafka/internal/KafkaConsumerActor.scala | 134 +++++++++++------- .../scala/fs2/kafka/internal/LogEntry.scala | 42 +++--- .../fs2/kafka/internal/WithAdminClient.scala | 2 +- .../scala/fs2/kafka/internal/syntax.scala | 15 +- .../test/scala/fs2/kafka/BaseKafkaSpec.scala | 37 +++-- .../scala/fs2/kafka/DeserializerSpec.scala | 12 ++ .../TransactionalKafkaProducerSpec.scala | 7 +- .../scala/fs2/kafka/internal/SyntaxSpec.scala | 36 +++++ .../fs2/kafka/vulcan/AvroDeserializer.scala | 34 +++-- .../kafka/vulcan/AvroDeserializerSpec.scala | 8 ++ project/plugins.sbt | 2 +- 22 files changed, 401 insertions(+), 368 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3bb0bd72..64406467c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,6 +15,11 @@ on: tags: [v*] env: + PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} + SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} + SONATYPE_CREDENTIAL_HOST: ${{ secrets.SONATYPE_CREDENTIAL_HOST }} + SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} + PGP_SECRET: ${{ secrets.PGP_SECRET }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} jobs: @@ -23,8 +28,13 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.1.3] + scala: [2.12.16, 2.13.8, 3.1.3] java: [temurin@8, temurin@17] + exclude: + - scala: 2.12.16 + java: temurin@17 + - scala: 3.1.3 + java: temurin@17 runs-on: ${{ matrix.os }} steps: - name: Checkout current branch (full) @@ -32,19 +42,37 @@ jobs: with: fetch-depth: 0 + - name: Download Java (temurin@8) + id: download-java-temurin-8 + if: matrix.java == 'temurin@8' + uses: typelevel/download-java@v1 + with: + distribution: temurin + java-version: 8 + - name: Setup Java (temurin@8) if: matrix.java == 'temurin@8' uses: actions/setup-java@v2 with: - distribution: temurin + distribution: jdkfile java-version: 8 + jdkFile: ${{ steps.download-java-temurin-8.outputs.jdkFile }} + + - name: Download Java (temurin@17) + id: download-java-temurin-17 + if: matrix.java == 'temurin@17' + uses: typelevel/download-java@v1 + with: + distribution: temurin + java-version: 17 - name: Setup Java (temurin@17) if: matrix.java == 'temurin@17' uses: actions/setup-java@v2 with: - distribution: temurin + distribution: jdkfile java-version: 17 + jdkFile: 
${{ steps.download-java-temurin-17.outputs.jdkFile }} - name: Cache sbt uses: actions/cache@v2 @@ -59,12 +87,12 @@ jobs: key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} - name: Check that workflows are up to date - run: sbt ++${{ matrix.scala }} githubWorkflowCheck + run: sbt '++${{ matrix.scala }}' 'project /' githubWorkflowCheck - - run: sbt ++${{ matrix.scala }} ci + - run: sbt '++${{ matrix.scala }}' ci - if: matrix.scala == '2.13.8' - run: sbt ++${{ matrix.scala }} docs/run + run: sbt '++${{ matrix.scala }}' docs/run publish: name: Publish Artifacts @@ -82,19 +110,37 @@ jobs: with: fetch-depth: 0 + - name: Download Java (temurin@8) + id: download-java-temurin-8 + if: matrix.java == 'temurin@8' + uses: typelevel/download-java@v1 + with: + distribution: temurin + java-version: 8 + - name: Setup Java (temurin@8) if: matrix.java == 'temurin@8' uses: actions/setup-java@v2 with: - distribution: temurin + distribution: jdkfile java-version: 8 + jdkFile: ${{ steps.download-java-temurin-8.outputs.jdkFile }} + + - name: Download Java (temurin@17) + id: download-java-temurin-17 + if: matrix.java == 'temurin@17' + uses: typelevel/download-java@v1 + with: + distribution: temurin + java-version: 17 - name: Setup Java (temurin@17) if: matrix.java == 'temurin@17' uses: actions/setup-java@v2 with: - distribution: temurin + distribution: jdkfile java-version: 17 + jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} - name: Cache sbt uses: actions/cache@v2 @@ -108,10 +154,21 @@ jobs: ~/Library/Caches/Coursier/v1 key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + - name: Import signing key + if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE == '' + run: echo $PGP_SECRET | base64 -di | gpg --import + + - name: Import signing key and strip passphrase + if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE != '' + run: | + echo "$PGP_SECRET" | base64 -di > /tmp/signing-key.gpg + echo "$PGP_PASSPHRASE" | gpg --pinentry-mode loopback --passphrase-fd 0 --import /tmp/signing-key.gpg + (echo "$PGP_PASSPHRASE"; echo; echo) | gpg --command-fd 0 --pinentry-mode loopback --change-passphrase $(gpg --list-secret-keys --with-colons 2> /dev/null | grep '^sec:' | cut --delimiter ':' --fields 5 | tail -n 1) + - env: PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt ++${{ matrix.scala }} ci-release + run: sbt '++${{ matrix.scala }}' ci-release docs/docusaurusPublishGhpages diff --git a/build.sbt b/build.sbt index a6036592f..7747b0876 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val fs2Version = "3.2.5" val kafkaVersion = "3.1.1" -val testcontainersScalaVersion = "0.40.6" +val testcontainersScalaVersion = "0.40.8" val vulcanVersion = "1.8.3" @@ -18,15 +18,19 @@ val scala2 = "2.13.8" val scala3 = "3.1.3" +ThisBuild / tlVersionIntroduced := Map("3" -> "2.1.0") + lazy val `fs2-kafka` = project .in(file(".")) .settings( - mimaSettings, + // Prevent spurious "mimaPreviousArtifacts is empty, not analyzing binary compatibility" message for root project + mimaReportBinaryIssues := {}, scalaSettings, noPublishSettings, console := (core / Compile / console).value, Test / console := (core / Test / console).value ) + .enablePlugins(TypelevelMimaPlugin) .aggregate(core, vulcan, `vulcan-testkit-munit`) lazy val core 
= project @@ -42,7 +46,6 @@ lazy val core = project ) ), publishSettings, - mimaSettings, scalaSettings, testSettings ) @@ -59,7 +62,6 @@ lazy val vulcan = project ) ), publishSettings, - mimaSettings, scalaSettings, testSettings ) @@ -76,7 +78,6 @@ lazy val `vulcan-testkit-munit` = project ) ), publishSettings, - mimaSettings, scalaSettings, testSettings ) @@ -133,7 +134,7 @@ lazy val dependencySettings = Seq( lazy val mdocSettings = Seq( mdoc := (Compile / run).evaluated, scalacOptions --= Seq("-Xfatal-warnings", "-Ywarn-unused"), - crossScalaVersions := Seq(scalaVersion.value), + crossScalaVersions := Seq(scala2), ScalaUnidoc / unidoc / unidocProjectFilter := inProjects(core, vulcan), ScalaUnidoc / unidoc / target := (LocalRootProject / baseDirectory).value / "website" / "static" / "api", cleanFiles += (ScalaUnidoc / unidoc / target).value, @@ -206,7 +207,6 @@ ThisBuild / githubWorkflowArtifactUpload := false ThisBuild / githubWorkflowJavaVersions := Seq(JavaSpec.temurin("8"), JavaSpec.temurin("17")) -ThisBuild / githubWorkflowTargetTags ++= Seq("v*") ThisBuild / githubWorkflowPublishTargetBranches := Seq(RefPredicate.StartsWith(Ref.Tag("v"))) @@ -260,43 +260,12 @@ lazy val publishSettings = ) ) -lazy val mimaSettings = Seq( - mimaPreviousArtifacts := { - if (publishArtifact.value) { - Set(organization.value %% moduleName.value % (ThisBuild / previousStableVersion).value.get) - } else Set() - }, - mimaBinaryIssueFilters ++= { - import com.typesafe.tools.mima.core._ - // format: off - Seq( - ProblemFilters.exclude[Problem]("fs2.kafka.internal.*"), - ProblemFilters.exclude[IncompatibleSignatureProblem]("*"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings.registerSchema"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings.withRegisterSchema"), - ProblemFilters.exclude[DirectMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings#AvroSettingsImpl.copy"), - ProblemFilters.exclude[DirectMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings#AvroSettingsImpl.this"), - ProblemFilters.exclude[DirectMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings#AvroSettingsImpl.apply"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.KafkaAdminClient.deleteConsumerGroups"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.KafkaProducerConnection.produce"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.KafkaProducerConnection.metrics"), - ProblemFilters.exclude[InheritedNewAbstractMethodProblem]("fs2.kafka.KafkaConsumer.committed"), - - // package-private - ProblemFilters.exclude[DirectMissingMethodProblem]("fs2.kafka.KafkaProducer.from"), - - // sealed - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.ConsumerSettings.withDeserializers"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.ProducerSettings.withSerializers"), - ProblemFilters.exclude[ReversedMissingMethodProblem]("fs2.kafka.vulcan.AvroSettings.*"), - ProblemFilters.exclude[FinalMethodProblem]("fs2.kafka.vulcan.AvroSettings.*"), - - // private - ProblemFilters.exclude[Problem]("fs2.kafka.vulcan.AvroSettings#AvroSettingsImpl.*") - ) +ThisBuild / mimaBinaryIssueFilters ++= { + import com.typesafe.tools.mima.core._ + // format: off + Seq.empty // format: on - } -) +} lazy val noMimaSettings = Seq(mimaPreviousArtifacts := Set()) @@ -310,32 +279,7 @@ ThisBuild / scalaVersion := scala2 ThisBuild / crossScalaVersions := Seq(scala2, scala3) lazy val scalaSettings = Seq( - scalacOptions ++= Seq( - 
"-deprecation", - "-encoding", - "UTF-8", - "-feature", - "-language:implicitConversions", - "-unchecked" - ) ++ ( - if (scalaVersion.value.startsWith("2")) - Seq( - "-language:higherKinds", - "-Xlint", - "-Ywarn-dead-code", - "-Ywarn-numeric-widen", - "-Ywarn-value-discard", - "-Ywarn-unused", - "-Xfatal-warnings" - ) - else - Seq( - "-Ykind-projector", - "-source:3.0-migration", - "-Xignore-scala2-macros" - ) - ), - Compile / doc / scalacOptions += "-nowarn", // workaround for https://github.com/scala/bug/issues/12007 + Compile / doc / scalacOptions += "-nowarn", // workaround for https://github.com/scala/bug/issues/12007 but also suppresses genunine problems Compile / console / scalacOptions --= Seq("-Xlint", "-Ywarn-unused"), Test / console / scalacOptions := (Compile / console / scalacOptions).value, Compile / unmanagedSourceDirectories ++= @@ -402,6 +346,10 @@ ThisBuild / updateSiteVariables := { IO.write(file, fileContents) } +def versionIntroduced(v: String) = Seq( + tlVersionIntroduced := List("2.12", "2.13", "3").map(_ -> v).toMap +) + def addCommandsAlias(name: String, values: List[String]) = addCommandAlias(name, values.mkString(";", ";", "")) diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala index adf4629f3..dcfff51b2 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala @@ -17,8 +17,6 @@ import cats.{Applicative, Bitraverse, Eq, Eval, Foldable, Show, Traverse} import fs2.Chunk import fs2.kafka.internal.syntax._ -import scala.collection.mutable - /** * [[CommittableProducerRecords]] represents zero or more [[ProducerRecord]]s * and a [[CommittableOffset]], used by [[TransactionalKafkaProducer]] to @@ -54,29 +52,14 @@ object CommittableProducerRecords { * Creates a new [[CommittableProducerRecords]] for producing zero or * more [[ProducerRecord]]s and committing an offset atomically within * a transaction. + * + * @see [[chunk]] if your `records` are already contained in an [[fs2.Chunk]] */ def apply[F[_], G[+_], K, V]( records: G[ProducerRecord[K, V]], offset: CommittableOffset[F] - )(implicit G: Foldable[G]): CommittableProducerRecords[F, K, V] = { - val numRecords = G.size(records).toInt - val chunk = if (numRecords <= 1) { - G.get(records)(0) match { - case None => Chunk.empty[ProducerRecord[K, V]] - case Some(record) => Chunk.singleton(record) - } - } else { - val buf = new mutable.ArrayBuffer[ProducerRecord[K, V]](numRecords) - G.foldLeft(records, ()) { - case (_, record) => - buf += record - () - } - Chunk.array(buf.toArray) - } - - new CommittableProducerRecordsImpl(chunk, offset) - } + )(implicit G: Foldable[G]): CommittableProducerRecords[F, K, V] = + chunk(Chunk.iterable(Foldable[G].toIterable(records)), offset) /** * Creates a new [[CommittableProducerRecords]] for producing exactly @@ -87,7 +70,18 @@ object CommittableProducerRecords { record: ProducerRecord[K, V], offset: CommittableOffset[F] ): CommittableProducerRecords[F, K, V] = - new CommittableProducerRecordsImpl(Chunk.singleton(record), offset) + chunk(Chunk.singleton(record), offset) + + /** + * Creates a new [[CommittableProducerRecords]] for producing zero or + * more [[ProducerRecord]]s and committing an offset atomically within + * a transaction. 
+ */ + def chunk[F[_], K, V]( + records: Chunk[ProducerRecord[K, V]], + offset: CommittableOffset[F] + ): CommittableProducerRecords[F, K, V] = + new CommittableProducerRecordsImpl(records, offset) implicit def committableProducerRecordsShow[F[_], K, V]( implicit diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 9f1b46516..d5a08c811 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -6,7 +6,6 @@ package fs2.kafka -import cats.effect.Sync import cats.{Applicative, Show} import fs2.kafka.security.KafkaCredentialStore import org.apache.kafka.clients.consumer.ConsumerConfig @@ -608,17 +607,6 @@ object ConsumerSettings { valueDeserializer = valueDeserializer.forValue ) - /** - * Create a `ConsumerSettings` instance using placeholder deserializers that return unit. - * These can be subsequently replaced using `withDeserializers`, allowing configuration of - * deserializers to be decoupled from other configuration. - */ - def unit[F[_]](implicit F: Sync[F]): ConsumerSettings[F, Unit, Unit] = - create( - keyDeserializer = F.pure(Deserializer.unit), - valueDeserializer = F.pure(Deserializer.unit) - ) - implicit def consumerSettingsShow[F[_], K, V]: Show[ConsumerSettings[F, K, V]] = Show.fromToString } diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index fbddc3daa..88a7638a5 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -209,7 +209,7 @@ object GenericDeserializer { * default `String` deserializer uses `UTF-8`. 
*/ def string[F[_]](charset: Charset)(implicit F: Sync[F]): Deserializer[F, String] = - Deserializer.lift(bytes => F.pure(new String(bytes, charset))) + Deserializer.lift(bytes => F.catchNonFatal(new String(bytes, charset))) /** * Creates a new [[Deserializer]] which deserializes `String` @@ -226,6 +226,15 @@ object GenericDeserializer { implicit def identity[F[_]](implicit F: Sync[F]): Deserializer[F, Array[Byte]] = Deserializer.lift(bytes => F.pure(bytes)) + /** + * The attempt [[Deserializer]] try to deserialize to type `A`, + * When it fails returns `Left` containing the exception, otherwise returns `Right` with the value `A` + */ + implicit def attempt[F[_], A]( + implicit deserializer: Deserializer[F, A] + ): Deserializer[F, Either[Throwable, A]] = + deserializer.attempt + /** * The option [[Deserializer]] returns `None` when the bytes are * `null`, and otherwise deserializes using the deserializer for diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index 4ea1daaef..ef4d0e059 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -6,8 +6,9 @@ package fs2.kafka -import cats.Foldable +import cats.{Foldable, Functor} import cats.effect._ +import cats.syntax.all._ import fs2.Stream import fs2.kafka.KafkaAdminClient._ import fs2.kafka.admin.MkAdminClient @@ -216,53 +217,53 @@ sealed abstract class KafkaAdminClient[F[_]] { object KafkaAdminClient { - private[this] def alterConfigsWith[F[_], G[_]]( + private[this] def alterConfigsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], configs: Map[ConfigResource, G[AlterConfigOp]] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.incrementalAlterConfigs(configs.asJavaMap).all.void) + ): F[Unit] = + withAdminClient(_.incrementalAlterConfigs(configs.asJavaMap).all).void - private[this] def createPartitionsWith[F[_]]( + private[this] def createPartitionsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], newPartitions: Map[String, NewPartitions] ): F[Unit] = - withAdminClient(_.createPartitions(newPartitions.asJava).all.void) + withAdminClient(_.createPartitions(newPartitions.asJava).all).void - private[this] def createTopicWith[F[_]]( + private[this] def createTopicWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], topic: NewTopic ): F[Unit] = - withAdminClient(_.createTopics(java.util.Collections.singleton(topic)).all.void) + withAdminClient(_.createTopics(java.util.Collections.singleton(topic)).all).void - private[this] def createTopicsWith[F[_], G[_]]( + private[this] def createTopicsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], topics: G[NewTopic] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.createTopics(topics.asJava).all.void) + ): F[Unit] = + withAdminClient(_.createTopics(topics.asJava).all).void - private[this] def createAclsWith[F[_], G[_]]( + private[this] def createAclsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], acls: G[AclBinding] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.createAcls(acls.asJava).all.void) + ): F[Unit] = + withAdminClient(_.createAcls(acls.asJava).all).void - private[this] def deleteTopicWith[F[_]]( + private[this] def deleteTopicWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], topic: String ): F[Unit] = - withAdminClient(_.deleteTopics(java.util.Collections.singleton(topic)).all.void) + 
withAdminClient(_.deleteTopics(java.util.Collections.singleton(topic)).all).void - private[this] def deleteTopicsWith[F[_], G[_]]( + private[this] def deleteTopicsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], topics: G[String] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.deleteTopics(topics.asJava).all.void) + ): F[Unit] = + withAdminClient(_.deleteTopics(topics.asJava).all).void - private[this] def deleteAclsWith[F[_], G[_]]( + private[this] def deleteAclsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], filters: G[AclBindingFilter] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.deleteAcls(filters.asJava).all.void) + ): F[Unit] = + withAdminClient(_.deleteAcls(filters.asJava).all).void sealed abstract class DescribeCluster[F[_]] { @@ -276,12 +277,12 @@ object KafkaAdminClient { def clusterId: F[String] } - private[this] def describeClusterWith[F[_]]( + private[this] def describeClusterWith[F[_]: Functor]( withAdminClient: WithAdminClient[F] ): DescribeCluster[F] = new DescribeCluster[F] { override def nodes: F[Set[Node]] = - withAdminClient(_.describeCluster.nodes.map(_.toSet)) + withAdminClient(_.describeCluster.nodes).map(_.toSet) override def controller: F[Node] = withAdminClient(_.describeCluster.controller) @@ -293,33 +294,33 @@ object KafkaAdminClient { "DescribeCluster$" + System.identityHashCode(this) } - private[this] def describeConfigsWith[F[_], G[_]]( + private[this] def describeConfigsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], resources: G[ConfigResource] - )(implicit G: Foldable[G]): F[Map[ConfigResource, List[ConfigEntry]]] = + ): F[Map[ConfigResource, List[ConfigEntry]]] = withAdminClient( - _.describeConfigs(resources.asJava).all.map(_.toMap.map { - case (k, v) => (k, v.entries().toList) - }.toMap) - ) + _.describeConfigs(resources.asJava).all + ).map(_.toMap.map { + case (k, v) => (k, v.entries().toList) + }.toMap) - private[this] def describeConsumerGroupsWith[F[_], G[_]]( + private[this] def describeConsumerGroupsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], groupIds: G[String] - )(implicit G: Foldable[G]): F[Map[String, ConsumerGroupDescription]] = - withAdminClient(_.describeConsumerGroups(groupIds.asJava).all.map(_.toMap)) + ): F[Map[String, ConsumerGroupDescription]] = + withAdminClient(_.describeConsumerGroups(groupIds.asJava).all).map(_.toMap) - private[this] def describeTopicsWith[F[_], G[_]]( + private[this] def describeTopicsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], topics: G[String] - )(implicit G: Foldable[G]): F[Map[String, TopicDescription]] = - withAdminClient(_.describeTopics(topics.asJava).allTopicNames.map(_.toMap)) + ): F[Map[String, TopicDescription]] = + withAdminClient(_.describeTopics(topics.asJava).allTopicNames).map(_.toMap) - private[this] def describeAclsWith[F[_]]( + private[this] def describeAclsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], filter: AclBindingFilter ): F[List[AclBinding]] = - withAdminClient(_.describeAcls(filter).values().map(_.toList)) + withAdminClient(_.describeAcls(filter).values()).map(_.toList) sealed abstract class ListConsumerGroupOffsetsForPartitions[F[_]] { @@ -327,11 +328,11 @@ object KafkaAdminClient { def partitionsToOffsetAndMetadata: F[Map[TopicPartition, OffsetAndMetadata]] } - private[this] def listConsumerGroupOffsetsForPartitionsWith[F[_], G[_]]( + private[this] def listConsumerGroupOffsetsForPartitionsWith[F[_]: Functor, 
G[_]: Foldable]( withAdminClient: WithAdminClient[F], groupId: String, partitions: G[TopicPartition] - )(implicit G: Foldable[G]): ListConsumerGroupOffsetsForPartitions[F] = + ): ListConsumerGroupOffsetsForPartitions[F] = new ListConsumerGroupOffsetsForPartitions[F] { private[this] def options: ListConsumerGroupOffsetsOptions = new ListConsumerGroupOffsetsOptions().topicPartitions(partitions.asJava) @@ -341,8 +342,8 @@ object KafkaAdminClient { adminClient .listConsumerGroupOffsets(groupId, options) .partitionsToOffsetAndMetadata - .map(_.toMap) - } + + }.map(_.toMap) override def toString: String = s"ListConsumerGroupOffsetsForPartitions(groupId = $groupId, partitions = $partitions)" @@ -359,7 +360,7 @@ object KafkaAdminClient { )(implicit G: Foldable[G]): ListConsumerGroupOffsetsForPartitions[F] } - private[this] def listConsumerGroupOffsetsWith[F[_]]( + private[this] def listConsumerGroupOffsetsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], groupId: String ): ListConsumerGroupOffsets[F] = @@ -369,8 +370,7 @@ object KafkaAdminClient { adminClient .listConsumerGroupOffsets(groupId) .partitionsToOffsetAndMetadata - .map(_.toMap) - } + }.map(_.toMap) override def forPartitions[G[_]]( partitions: G[TopicPartition] @@ -390,15 +390,15 @@ object KafkaAdminClient { def listings: F[List[ConsumerGroupListing]] } - private[this] def listConsumerGroupsWith[F[_]]( + private[this] def listConsumerGroupsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F] ): ListConsumerGroups[F] = new ListConsumerGroups[F] { override def groupIds: F[List[String]] = - withAdminClient(_.listConsumerGroups.all.map(_.mapToList(_.groupId))) + withAdminClient(_.listConsumerGroups.all).map(_.mapToList(_.groupId)) override def listings: F[List[ConsumerGroupListing]] = - withAdminClient(_.listConsumerGroups.all.map(_.toList)) + withAdminClient(_.listConsumerGroups.all).map(_.toList) override def toString: String = "ListConsumerGroups$" + System.identityHashCode(this) @@ -416,7 +416,7 @@ object KafkaAdminClient { def namesToListings: F[Map[String, TopicListing]] } - private[this] def listTopicsIncludeInternalWith[F[_]]( + private[this] def listTopicsIncludeInternalWith[F[_]: Functor]( withAdminClient: WithAdminClient[F] ): ListTopicsIncludeInternal[F] = new ListTopicsIncludeInternal[F] { @@ -424,13 +424,13 @@ object KafkaAdminClient { new ListTopicsOptions().listInternal(true) override def names: F[Set[String]] = - withAdminClient(_.listTopics(options).names.map(_.toSet)) + withAdminClient(_.listTopics(options).names).map(_.toSet) override def listings: F[List[TopicListing]] = - withAdminClient(_.listTopics(options).listings.map(_.toList)) + withAdminClient(_.listTopics(options).listings).map(_.toList) override def namesToListings: F[Map[String, TopicListing]] = - withAdminClient(_.listTopics(options).namesToListings.map(_.toMap)) + withAdminClient(_.listTopics(options).namesToListings).map(_.toMap) override def toString: String = "ListTopicsIncludeInternal$" + System.identityHashCode(this) @@ -451,18 +451,18 @@ object KafkaAdminClient { def includeInternal: ListTopicsIncludeInternal[F] } - private[this] def listTopicsWith[F[_]]( + private[this] def listTopicsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F] ): ListTopics[F] = new ListTopics[F] { override def names: F[Set[String]] = - withAdminClient(_.listTopics.names.map(_.toSet)) + withAdminClient(_.listTopics.names).map(_.toSet) override def listings: F[List[TopicListing]] = - withAdminClient(_.listTopics.listings.map(_.toList)) + 
withAdminClient(_.listTopics.listings).map(_.toList) override def namesToListings: F[Map[String, TopicListing]] = - withAdminClient(_.listTopics.namesToListings.map(_.toMap)) + withAdminClient(_.listTopics.namesToListings).map(_.toMap) override def includeInternal: ListTopicsIncludeInternal[F] = listTopicsIncludeInternalWith(withAdminClient) @@ -471,25 +471,25 @@ object KafkaAdminClient { "ListTopics$" + System.identityHashCode(this) } - private[this] def alterConsumerGroupOffsetsWith[F[_]]( + private[this] def alterConsumerGroupOffsetsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], groupId: String, offsets: Map[TopicPartition, OffsetAndMetadata] ): F[Unit] = - withAdminClient(_.alterConsumerGroupOffsets(groupId, offsets.asJava).all().void) + withAdminClient(_.alterConsumerGroupOffsets(groupId, offsets.asJava).all()).void - private[this] def deleteConsumerGroupOffsetsWith[F[_]]( + private[this] def deleteConsumerGroupOffsetsWith[F[_]: Functor]( withAdminClient: WithAdminClient[F], groupId: String, partitions: Set[TopicPartition] ): F[Unit] = - withAdminClient(_.deleteConsumerGroupOffsets(groupId, partitions.asJava).all().void) + withAdminClient(_.deleteConsumerGroupOffsets(groupId, partitions.asJava).all()).void - private[this] def deleteConsumerGroupsWith[F[_], G[_]]( + private[this] def deleteConsumerGroupsWith[F[_]: Functor, G[_]: Foldable]( withAdminClient: WithAdminClient[F], groupIds: G[String] - )(implicit G: Foldable[G]): F[Unit] = - withAdminClient(_.deleteConsumerGroups(groupIds.asJava).all().void) + ): F[Unit] = + withAdminClient(_.deleteConsumerGroups(groupIds.asJava).all()).void /** * Creates a new [[KafkaAdminClient]] in the `Resource` context, @@ -514,9 +514,9 @@ object KafkaAdminClient { G: Async[G], mk: MkAdminClient[F] ): Resource[F, KafkaAdminClient[G]] = - WithAdminClient[F, G](mk, settings).map(create) + WithAdminClient[F, G](mk, settings).map(create[G]) - private def create[F[_]](client: WithAdminClient[F]) = + private def create[F[_]: Functor](client: WithAdminClient[F]) = new KafkaAdminClient[F] { override def alterConfigs[G[_]](configs: Map[ConfigResource, G[AlterConfigOp]])( @@ -627,10 +627,10 @@ object KafkaAdminClient { * to code defined in this object, ensuring factory methods require an instance * to be provided at the call site. 
*/ - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig1[F[_]]: MkAdminClient[F] = throw new AssertionError("should not be used") - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig2[F[_]]: MkAdminClient[F] = throw new AssertionError("should not be used") } diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index 1a3bf4beb..bd7e89d24 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -93,10 +93,10 @@ object KafkaConsumer { } }(_.cancel) - private def startConsumerActor[F[_]]( - requests: QueueSource[F, Request[F]], + private def startConsumerActor[F[_], K, V]( + requests: QueueSource[F, Request[F, K, V]], polls: QueueSource[F, Request.Poll[F]], - actor: KafkaConsumerActor[F] + actor: KafkaConsumerActor[F, K, V] )( implicit F: Async[F] ): Resource[F, FakeFiber[F]] = @@ -117,11 +117,9 @@ object KafkaConsumer { } private def createKafkaConsumer[F[_], K, V]( - requests: QueueSink[F, Request[F]], + requests: QueueSink[F, Request[F, K, V]], settings: ConsumerSettings[F, K, V], - keyDes: KeyDeserializer[F, K], - valueDes: ValueDeserializer[F, V], - actor: KafkaConsumerActor[F], + actor: KafkaConsumerActor[F, K, V], fiber: FakeFiber[F], streamIdRef: Ref[F, StreamId], id: Int, @@ -136,7 +134,7 @@ object KafkaConsumer { Queue.bounded(settings.maxPrefetchBatches - 1) type PartitionResult = - (Chunk[KafkaByteConsumerRecord], FetchCompletedReason) + (Chunk[CommittableConsumerRecord[F, K, V]], FetchCompletedReason) type PartitionsMap = Map[TopicPartition, Stream[F, CommittableConsumerRecord[F, K, V]]] type PartitionsMapQueue = Queue[F, Option[PartitionsMap]] @@ -163,23 +161,6 @@ object KafkaConsumer { .void stopReqs <- Deferred[F, Unit] } yield Stream.eval { - def committableConsumerRecord( - record: ConsumerRecord[K, V], - partition: TopicPartition - ): CommittableConsumerRecord[F, K, V] = - CommittableConsumerRecord( - record = record, - offset = CommittableOffset( - topicPartition = partition, - consumerGroupId = actor.consumerGroupId, - offsetAndMetadata = new OffsetAndMetadata( - record.offset + 1L, - settings.recordMetadata(record) - ), - commit = actor.offsetCommit - ) - ) - def fetchPartition: F[Unit] = F.deferred[PartitionResult].flatMap { deferred => val callback: PartitionResult => F[Unit] = deferred.complete(_).void @@ -207,21 +188,12 @@ object KafkaConsumer { assigned.ifM(storeFetch, completeRevoked) } >> deferred.get - F.race(shutdown, fetch).flatMap { case Left(()) => stopReqs.complete(()).void case Right((chunk, reason)) => - val c = chunk.traverse[F, CommittableConsumerRecord[F, K, V]] { rec => - ConsumerRecord - .fromJava[F, K, V](rec, keyDes, valueDes) - .map(committableConsumerRecord(_, partition)) - } - - val enqueueChunk = c.flatMap { chunk => - chunks.offer(Some(chunk)).unlessA(chunk.isEmpty) - } + val enqueueChunk = chunks.offer(Some(chunk)).unlessA(chunk.isEmpty) val completeRevoked = stopReqs.complete(()).void.whenA(reason.topicPartitionRevoked) @@ -404,7 +376,7 @@ object KafkaConsumer { } private[this] def request[A]( - request: (Either[Throwable, A] => F[Unit]) => Request[F] + request: (Either[Throwable, A] => F[Unit]) => Request[F, K, V] ): F[A] = Deferred[F, Either[Throwable, A]].flatMap { deferred => requests.offer(request(deferred.complete(_).void)) >> @@ -662,9 +634,9 @@ object KafkaConsumer { id <- Resource.eval(F.delay(new Object().hashCode)) jitter 
<- Resource.eval(Jitter.default[F]) logging <- Resource.eval(Logging.default[F](id)) - requests <- Resource.eval(Queue.unbounded[F, Request[F]]) + requests <- Resource.eval(Queue.unbounded[F, Request[F, K, V]]) polls <- Resource.eval(Queue.bounded[F, Request.Poll[F]](1)) - ref <- Resource.eval(Ref.of[F, State[F]](State.empty)) + ref <- Resource.eval(Ref.of[F, State[F, K, V]](State.empty)) streamId <- Resource.eval(Ref.of[F, StreamId](0)) dispatcher <- Dispatcher[F] stopConsumingDeferred <- Resource.eval(Deferred[F, Unit]) @@ -674,8 +646,10 @@ object KafkaConsumer { implicit val logging0: Logging[F] = logging implicit val dispatcher0: Dispatcher[F] = dispatcher - new KafkaConsumerActor[F]( + new KafkaConsumerActor( settings = settings, + keyDeserializer = keyDeserializer, + valueDeserializer = valueDeserializer, ref = ref, requests = requests, withConsumer = withConsumer @@ -686,8 +660,6 @@ object KafkaConsumer { } yield createKafkaConsumer( requests, settings, - keyDeserializer, - valueDeserializer, actor, actorFiber.combine(polls), streamId, @@ -810,10 +782,10 @@ object KafkaConsumer { * to code defined in this object, ensuring factory methods require an instance * to be provided at the call site. */ - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig1[F[_]]: MkConsumer[F] = throw new AssertionError("should not be used") - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig2[F[_]]: MkConsumer[F] = throw new AssertionError("should not be used") } diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index f9f19f121..185f68ea0 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -289,10 +289,10 @@ object KafkaProducer { * to code defined in this object, ensuring factory methods require an instance * to be provided at the call site. */ - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig1[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig2[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") } diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index 284e68e59..b7d948b78 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -148,10 +148,10 @@ object KafkaProducerConnection { * to code defined in this object, ensuring factory methods require an instance * to be provided at the call site. 
*/ - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig1[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig2[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") } diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index d5c4498fa..c0f30df93 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -6,7 +6,6 @@ package fs2.kafka -import cats.effect.Sync import cats.{Applicative, Show} import cats.syntax.all._ import fs2.kafka.security.KafkaCredentialStore @@ -358,19 +357,6 @@ object ProducerSettings { ): ProducerSettings[F, K, V] = create(keySerializer = keySerializer.forKey, valueSerializer = valueSerializer.forValue) - /** - * Create a `ProducerSettings` instance using placeholder serializers that serialize nothing. - * These can be subsequently replaced using `withSerializers`, allowing configuration of - * serializers to be decoupled from other configuration. - */ - def nothing[F[_]](implicit F: Sync[F]): ProducerSettings[F, Nothing, Nothing] = { - val nothingSerializer = F.pure(Serializer.fail[F, Nothing](new AssertionError("impossible"))) - create[F, Nothing, Nothing]( - keySerializer = nothingSerializer.widen, - valueSerializer = nothingSerializer.widen - ) - } - implicit def producerSettingsShow[F[_], K, V]: Show[ProducerSettings[F, K, V]] = Show.fromToString } diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index c6c705445..65cbb68b8 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -233,10 +233,10 @@ object TransactionalKafkaProducer { * to code defined in this object, ensuring factory methods require an instance * to be provided at the call site. */ - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig1[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") - @nowarn("cat=unused") + @nowarn("msg=never used") implicit private def mkAmbig2[F[_]]: MkProducer[F] = throw new AssertionError("should not be used") } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index 21c251125..dc345f9fa 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -22,11 +22,14 @@ import fs2.kafka.internal.syntax._ import java.time.Duration import java.util -import org.apache.kafka.clients.consumer.{ConsumerRebalanceListener, OffsetAndMetadata} +import org.apache.kafka.clients.consumer.{ + ConsumerConfig, + ConsumerRebalanceListener, + OffsetAndMetadata +} import org.apache.kafka.common.TopicPartition import scala.collection.immutable.SortedSet -import org.apache.kafka.clients.consumer.ConsumerConfig /** * [[KafkaConsumerActor]] wraps a Java `KafkaConsumer` and works similar to @@ -43,10 +46,12 @@ import org.apache.kafka.clients.consumer.ConsumerConfig * backpressure, as long as `Fetch` requests are only issued when there * is more demand. 
*/ -private[kafka] final class KafkaConsumerActor[F[_]]( - settings: ConsumerSettings[F, _, _], - val ref: Ref[F, State[F]], - requests: Queue[F, Request[F]], +private[kafka] final class KafkaConsumerActor[F[_], K, V]( + settings: ConsumerSettings[F, K, V], + keyDeserializer: Deserializer[F, K], + valueDeserializer: Deserializer[F, V], + val ref: Ref[F, State[F, K, V]], + requests: Queue[F, Request[F, K, V]], withConsumer: WithConsumer[F] )( implicit F: Async[F], @@ -57,9 +62,9 @@ private[kafka] final class KafkaConsumerActor[F[_]]( import logging._ private[this] type ConsumerRecords = - Map[TopicPartition, NonEmptyVector[KafkaByteConsumerRecord]] + Map[TopicPartition, NonEmptyVector[CommittableConsumerRecord[F, K, V]]] - private[kafka] val consumerGroupId: Option[String] = + private[this] val consumerGroupId: Option[String] = settings.properties.get(ConsumerConfig.GROUP_ID_CONFIG) val consumerRebalanceListener: ConsumerRebalanceListener = @@ -139,7 +144,7 @@ private[kafka] final class KafkaConsumerActor[F[_]]( } private[this] def revoked(revoked: SortedSet[TopicPartition]): F[Unit] = { - def withState[A] = StateT.apply[Id, State[F], A](_) + def withState[A] = StateT.apply[Id, State[F, K, V], A](_) def completeWithRecords(withRecords: Set[TopicPartition]) = withState { st => if (withRecords.nonEmpty) { @@ -233,17 +238,42 @@ private[kafka] final class KafkaConsumerActor[F[_]]( } } - private[this] def records(batch: KafkaByteConsumerRecords): ConsumerRecords = - batch.partitions.toVector.map { partition => - partition -> NonEmptyVector - .fromVectorUnsafe(batch.records(partition).toVector) - }.toMap + private[this] def committableConsumerRecord( + record: ConsumerRecord[K, V], + partition: TopicPartition + ): CommittableConsumerRecord[F, K, V] = + CommittableConsumerRecord( + record = record, + offset = CommittableOffset( + topicPartition = partition, + consumerGroupId = consumerGroupId, + offsetAndMetadata = new OffsetAndMetadata( + record.offset + 1L, + settings.recordMetadata(record) + ), + commit = offsetCommit + ) + ) + + private[this] def records(batch: KafkaByteConsumerRecords): F[ConsumerRecords] = + batch.partitions.toVector + .traverse { partition => + NonEmptyVector + .fromVectorUnsafe(batch.records(partition).toVector) + .traverse { record => + ConsumerRecord + .fromJava(record, keyDeserializer, valueDeserializer) + .map(committableConsumerRecord(_, partition)) + } + .map((partition, _)) + } + .map(_.toMap) private[this] val pollTimeout: Duration = settings.pollTimeout.toJava private[this] val poll: F[Unit] = { - def pollConsumer(state: State[F]): F[ConsumerRecords] = + def pollConsumer(state: State[F, K, V]): F[ConsumerRecords] = withConsumer .blocking { consumer => val assigned = consumer.assignment.toSet @@ -261,11 +291,11 @@ private[kafka] final class KafkaConsumerActor[F[_]]( consumer.poll(pollTimeout) } - .map(records) + .flatMap(records) def handlePoll(newRecords: ConsumerRecords, initialRebalancing: Boolean): F[Unit] = { def handleBatch( - state: State[F], + state: State[F, K, V], pendingCommits: Option[HandlePollResult.PendingCommits] ) = if (state.fetches.isEmpty) { @@ -339,7 +369,7 @@ private[kafka] final class KafkaConsumerActor[F[_]]( } } - def handlePendingCommits(state: State[F]) = { + def handlePendingCommits(state: State[F, K, V]) = { val currentRebalancing = state.rebalancing if (initialRebalancing && !currentRebalancing && state.pendingCommits.nonEmpty) { @@ -383,7 +413,7 @@ private[kafka] final class KafkaConsumerActor[F[_]]( } } - def handle(request: 
Request[F]): F[Unit] = + def handle(request: Request[F, K, V]): F[Unit] = request match { case Request.Poll() => poll case request @ Request.Commit(_, _) => commit(request) @@ -437,17 +467,13 @@ private[kafka] final class KafkaConsumerActor[F[_]]( } private[kafka] object KafkaConsumerActor { - final case class FetchRequest[F[_]]( - callback: ((Chunk[KafkaByteConsumerRecord], FetchCompletedReason)) => F[Unit] + final case class FetchRequest[F[_], K, V]( + callback: ((Chunk[CommittableConsumerRecord[F, K, V]], FetchCompletedReason)) => F[Unit] ) { - def completeRevoked( - chunk: Chunk[KafkaByteConsumerRecord] - ): F[Unit] = + def completeRevoked(chunk: Chunk[CommittableConsumerRecord[F, K, V]]): F[Unit] = callback((chunk, FetchCompletedReason.TopicPartitionRevoked)) - def completeRecords( - chunk: Chunk[KafkaByteConsumerRecord] - ): F[Unit] = + def completeRecords(chunk: Chunk[CommittableConsumerRecord[F, K, V]]): F[Unit] = callback((chunk, FetchCompletedReason.FetchedRecords)) override def toString: String = @@ -456,16 +482,16 @@ private[kafka] object KafkaConsumerActor { type StreamId = Int - final case class State[F[_]]( - fetches: Map[TopicPartition, Map[StreamId, FetchRequest[F]]], - records: Map[TopicPartition, NonEmptyVector[KafkaByteConsumerRecord]], + final case class State[F[_], K, V]( + fetches: Map[TopicPartition, Map[StreamId, FetchRequest[F, K, V]]], + records: Map[TopicPartition, NonEmptyVector[CommittableConsumerRecord[F, K, V]]], pendingCommits: Chain[Request.Commit[F]], onRebalances: Chain[OnRebalance[F]], rebalancing: Boolean, subscribed: Boolean, streaming: Boolean ) { - def withOnRebalance(onRebalance: OnRebalance[F]): State[F] = + def withOnRebalance(onRebalance: OnRebalance[F]): State[F, K, V] = copy(onRebalances = onRebalances append onRebalance) /** @@ -474,18 +500,18 @@ private[kafka] object KafkaConsumerActor { def withFetch( partition: TopicPartition, streamId: StreamId, - callback: ((Chunk[KafkaByteConsumerRecord], FetchCompletedReason)) => F[Unit] - ): (State[F], List[FetchRequest[F]]) = { + callback: ((Chunk[CommittableConsumerRecord[F, K, V]], FetchCompletedReason)) => F[Unit] + ): (State[F, K, V], List[FetchRequest[F, K, V]]) = { val newFetchRequest = FetchRequest(callback) - val oldPartitionFetches: Map[StreamId, FetchRequest[F]] = + val oldPartitionFetches: Map[StreamId, FetchRequest[F, K, V]] = fetches.getOrElse(partition, Map.empty) - val newFetches: Map[TopicPartition, Map[StreamId, FetchRequest[F]]] = + val newFetches: Map[TopicPartition, Map[StreamId, FetchRequest[F, K, V]]] = fetches.updated(partition, oldPartitionFetches.updated(streamId, newFetchRequest)) - val fetchesToRevoke: List[FetchRequest[F]] = + val fetchesToRevoke: List[FetchRequest[F, K, V]] = oldPartitionFetches.get(streamId).toList ( @@ -494,41 +520,41 @@ private[kafka] object KafkaConsumerActor { ) } - def withoutFetches(partitions: Set[TopicPartition]): State[F] = + def withoutFetches(partitions: Set[TopicPartition]): State[F, K, V] = copy( fetches = fetches.filterKeysStrict(!partitions.contains(_)) ) def withRecords( - records: Map[TopicPartition, NonEmptyVector[KafkaByteConsumerRecord]] - ): State[F] = + records: Map[TopicPartition, NonEmptyVector[CommittableConsumerRecord[F, K, V]]] + ): State[F, K, V] = copy(records = this.records combine records) - def withoutFetchesAndRecords(partitions: Set[TopicPartition]): State[F] = + def withoutFetchesAndRecords(partitions: Set[TopicPartition]): State[F, K, V] = copy( fetches = fetches.filterKeysStrict(!partitions.contains(_)), records = 
records.filterKeysStrict(!partitions.contains(_)) ) - def withoutRecords(partitions: Set[TopicPartition]): State[F] = + def withoutRecords(partitions: Set[TopicPartition]): State[F, K, V] = copy(records = records.filterKeysStrict(!partitions.contains(_))) - def withPendingCommit(pendingCommit: Request.Commit[F]): State[F] = + def withPendingCommit(pendingCommit: Request.Commit[F]): State[F, K, V] = copy(pendingCommits = pendingCommits append pendingCommit) - def withoutPendingCommits: State[F] = + def withoutPendingCommits: State[F, K, V] = if (pendingCommits.isEmpty) this else copy(pendingCommits = Chain.empty) - def withRebalancing(rebalancing: Boolean): State[F] = + def withRebalancing(rebalancing: Boolean): State[F, K, V] = if (this.rebalancing == rebalancing) this else copy(rebalancing = rebalancing) - def asSubscribed: State[F] = + def asSubscribed: State[F, K, V] = if (subscribed) this else copy(subscribed = true) - def asUnsubscribed: State[F] = + def asUnsubscribed: State[F, K, V] = if (!subscribed) this else copy(subscribed = false) - def asStreaming: State[F] = + def asStreaming: State[F, K, V] = if (streaming) this else copy(streaming = true) override def toString: String = { @@ -547,7 +573,7 @@ private[kafka] object KafkaConsumerActor { } object State { - def empty[F[_]]: State[F] = + def empty[F[_], K, V]: State[F, K, V] = State( fetches = Map.empty, records = Map.empty, @@ -580,13 +606,13 @@ private[kafka] object KafkaConsumerActor { "OnRebalance$" + System.identityHashCode(this) } - sealed abstract class Request[F[_]] + sealed abstract class Request[F[_], -K, -V] object Request { final case class WithPermit[F[_], A](fa: F[A], callback: Either[Throwable, A] => F[Unit]) - extends Request[F] + extends Request[F, Any, Any] - final case class Poll[F[_]]() extends Request[F] + final case class Poll[F[_]]() extends Request[F, Any, Any] private[this] val pollInstance: Poll[Nothing] = Poll[Nothing]() @@ -597,16 +623,16 @@ private[kafka] object KafkaConsumerActor { final case class Commit[F[_]]( offsets: Map[TopicPartition, OffsetAndMetadata], callback: Either[Throwable, Unit] => Unit - ) extends Request[F] + ) extends Request[F, Any, Any] final case class ManualCommitAsync[F[_]]( offsets: Map[TopicPartition, OffsetAndMetadata], callback: Either[Throwable, Unit] => F[Unit] - ) extends Request[F] + ) extends Request[F, Any, Any] final case class ManualCommitSync[F[_]]( offsets: Map[TopicPartition, OffsetAndMetadata], callback: Either[Throwable, Unit] => F[Unit] - ) extends Request[F] + ) extends Request[F, Any, Any] } } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala b/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala index ea79bcde2..c0b41726d 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala @@ -9,7 +9,7 @@ package fs2.kafka.internal import cats.data.{Chain, NonEmptyList, NonEmptySet, NonEmptyVector} import cats.syntax.all._ import fs2.Chunk -import fs2.kafka._ +import fs2.kafka.CommittableConsumerRecord import fs2.kafka.instances._ import fs2.kafka.internal.KafkaConsumerActor._ import fs2.kafka.internal.LogLevel._ @@ -27,7 +27,7 @@ private[kafka] sealed abstract class LogEntry { private[kafka] object LogEntry { final case class SubscribedTopics[F[_]]( topics: NonEmptyList[String], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -36,7 +36,7 @@ private[kafka] object 
LogEntry { final case class ManuallyAssignedPartitions[F[_]]( partitions: NonEmptySet[TopicPartition], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -45,7 +45,7 @@ private[kafka] object LogEntry { final case class SubscribedPattern[F[_]]( pattern: Pattern, - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -53,17 +53,17 @@ private[kafka] object LogEntry { } final case class Unsubscribed[F[_]]( - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = s"Consumer unsubscribed from all partitions. Current state [$state]." } - final case class StoredFetch[F[_]]( + final case class StoredFetch[F[_], K, V]( partition: TopicPartition, - callback: ((Chunk[KafkaByteConsumerRecord], FetchCompletedReason)) => F[Unit], - state: State[F] + callback: ((Chunk[CommittableConsumerRecord[F, K, V]], FetchCompletedReason)) => F[Unit], + state: State[F, K, V] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -72,7 +72,7 @@ private[kafka] object LogEntry { final case class StoredOnRebalance[F[_]]( onRebalance: OnRebalance[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -81,7 +81,7 @@ private[kafka] object LogEntry { final case class AssignedPartitions[F[_]]( partitions: SortedSet[TopicPartition], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -90,7 +90,7 @@ private[kafka] object LogEntry { final case class RevokedPartitions[F[_]]( partitions: SortedSet[TopicPartition], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -99,7 +99,7 @@ private[kafka] object LogEntry { final case class CompletedFetchesWithRecords[F[_]]( records: Records[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -108,7 +108,7 @@ private[kafka] object LogEntry { final case class RevokedFetchesWithRecords[F[_]]( records: Records[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -117,7 +117,7 @@ private[kafka] object LogEntry { final case class RevokedFetchesWithoutRecords[F[_]]( partitions: Set[TopicPartition], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -126,7 +126,7 @@ private[kafka] object LogEntry { final case class RemovedRevokedRecords[F[_]]( records: Records[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -135,7 +135,7 @@ private[kafka] object LogEntry { final case class StoredRecords[F[_]]( records: Records[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -153,7 +153,7 @@ private[kafka] object LogEntry { final case class StoredPendingCommit[F[_]]( commit: Request.Commit[F], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -162,7 +162,7 @@ private[kafka] object LogEntry { final 
case class CommittedPendingCommits[F[_]]( pendingCommits: Chain[Request.Commit[F]], - state: State[F] + state: State[F, _, _] ) extends LogEntry { override def level: LogLevel = Debug override def message: String = @@ -178,12 +178,12 @@ private[kafka] object LogEntry { case (append, (tp, ms)) => append(tp.show) append(" -> { first: ") - append(ms.head.offset.show) + append(ms.head.offset.offsetAndMetadata.show) append(", last: ") - append(ms.last.offset.show) + append(ms.last.offset.offsetAndMetadata.show) append(" }") }("", ", ", "") private[this] type Records[F[_]] = - Map[TopicPartition, NonEmptyVector[KafkaByteConsumerRecord]] + Map[TopicPartition, NonEmptyVector[CommittableConsumerRecord[F, _, _]]] } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala index 9556c1b83..7d5dcde99 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala @@ -29,7 +29,7 @@ private[kafka] object WithAdminClient { val withAdminClient = new WithAdminClient[G] { override def apply[A](f: AdminClient => KafkaFuture[A]): G[A] = - G.defer(f(adminClient).cancelable) + G.delay(f(adminClient)).cancelable } val close = diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index b5f5273bb..02d5969f5 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -13,7 +13,6 @@ import fs2.kafka.{Header, Headers, KafkaHeaders} import scala.jdk.CollectionConverters._ import java.util import org.apache.kafka.common.KafkaFuture -import org.apache.kafka.common.KafkaFuture.BaseFunction import scala.collection.immutable.{ArraySeq, SortedSet} private[kafka] object syntax { @@ -160,19 +159,11 @@ private[kafka] object syntax { } } - implicit final class KafkaFutureSyntax[A]( - private val future: KafkaFuture[A] + implicit final class KafkaFutureSyntax[F[_], A]( + private val futureF: F[KafkaFuture[A]] ) extends AnyVal { - private[this] def baseFunction[B](f: A => B): BaseFunction[A, B] = f(_) - - def map[B](f: A => B): KafkaFuture[B] = - future.thenApply(baseFunction(f)) - - def void: KafkaFuture[Unit] = - map(_ => ()) - def cancelable[F[_]](implicit F: Async[F]): F[A] = - F.fromCompletableFuture(F.delay(future.toCompletionStage.toCompletableFuture)) + F.fromCompletableFuture(futureF.map(_.toCompletionStage.toCompletableFuture)) } implicit final class KafkaHeadersSyntax( diff --git a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala index 2a9085d08..89a9c6399 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala @@ -55,6 +55,7 @@ import org.apache.kafka.common.serialization.StringSerializer import java.util.concurrent.TimeUnit import org.apache.kafka.common.serialization.StringDeserializer import org.scalatest.Args +import org.testcontainers.utility.DockerImageName abstract class BaseKafkaSpec extends BaseAsyncSpec with ForAllTestContainer { @@ -66,29 +67,25 @@ abstract class BaseKafkaSpec extends BaseAsyncSpec with ForAllTestContainer { override def runTest(testName: String, args: Args) = super.runTest(testName, args) - private val imageVersion = "7.0.1" + private val imageVersion = "7.2.0" - private lazy val imageName = 
Option(System.getProperty("os.arch")) match { - case Some("aarch64") => - "niciqy/cp-kafka-arm64" // no official docker image for ARM is available yet - case _ => "confluentinc/cp-kafka" - } + private lazy val imageName = "confluentinc/cp-kafka" - override val container: KafkaContainer = new KafkaContainer() - .configure { container => - container - .withEnv("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1") - .withEnv( - "KAFKA_TRANSACTION_ABORT_TIMED_OUT_TRANSACTION_CLEANUP_INTERVAL_MS", - transactionTimeoutInterval.toMillis.toString - ) - .withEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1") - .withEnv("KAFKA_AUTHORIZER_CLASS_NAME", "kafka.security.authorizer.AclAuthorizer") - .withEnv("KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND", "true") - .setDockerImageName(s"$imageName:$imageVersion") + override val container: KafkaContainer = + new KafkaContainer(DockerImageName.parse(s"$imageName:$imageVersion")) + .configure { container => + container + .withEnv("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1") + .withEnv( + "KAFKA_TRANSACTION_ABORT_TIMED_OUT_TRANSACTION_CLEANUP_INTERVAL_MS", + transactionTimeoutInterval.toMillis.toString + ) + .withEnv("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1") + .withEnv("KAFKA_AUTHORIZER_CLASS_NAME", "kafka.security.authorizer.AclAuthorizer") + .withEnv("KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND", "true") - () - } + () + } implicit final val stringSerializer: KafkaSerializer[String] = new StringSerializer diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala index 4b69ef7f1..573438231 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala @@ -165,6 +165,18 @@ final class DeserializerSpec extends BaseCatsSpec { } } + test("Deserializer#attempt (implicit)") { + val deserializer = + Deserializer[IO, Either[Throwable, String]] + + assert(deserializer.deserialize("topic", Headers.empty, null).unsafeRunSync().isLeft) + + forAll { (s: String) => + val serialized = Serializer[IO, String].serialize("topic", Headers.empty, s).unsafeRunSync() + deserializer.deserialize("topic", Headers.empty, serialized).unsafeRunSync() shouldBe Right(s) + } + } + test("Deserializer#option") { val deserializer = Deserializer[IO, Option[String]] diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 133d98ea4..e52f07e91 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -1,7 +1,6 @@ package fs2.kafka import java.util -import cats.data.NonEmptyList import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.syntax.all._ @@ -268,7 +267,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { it("should abort transactions if committing offsets fails") { withTopic { topic => createCustomTopic(topic, partitions = 3) - val toProduce = (0 to 100).toList.map(n => s"key-$n" -> s"value-$n").toList + val toProduce = (0 to 100).toList.map(n => s"key-$n" -> s"value-$n") val toPassthrough = "passthrough" val error = new RuntimeException("BOOM") @@ -317,8 +316,8 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { } records = Chunk.seq(recordsToProduce.zip(offsets)).map { case (record, offset) => - CommittableProducerRecords( - 
NonEmptyList.one(record), + CommittableProducerRecords.chunk( + Chunk.singleton(record), offset ) } diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index df168010f..865f96ac5 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -1,9 +1,17 @@ package fs2.kafka.internal +import cats.effect.unsafe.implicits.global +import cats.effect.IO import fs2.kafka._ import fs2.kafka.BaseSpec import fs2.kafka.internal.syntax._ +import org.apache.kafka.common.KafkaFuture + +import java.time.temporal.ChronoUnit.MICROS import org.apache.kafka.common.header.internals.RecordHeaders +import org.apache.kafka.common.internals.KafkaFutureImpl + +import scala.concurrent.duration._ final class SyntaxSpec extends BaseSpec { @@ -39,4 +47,32 @@ final class SyntaxSpec extends BaseSpec { } } } + + describe("KafkaFuture.cancelable") { + + it("should cancel future when fiber is cancelled") { + + @volatile var isFutureCancelled = false + + val test = + for { + gate <- IO.deferred[Unit] + futureIO: IO[KafkaFuture[Unit]] = gate.complete(()) >> IO { + new KafkaFutureImpl[Unit] { + override def cancel(mayInterruptIfRunning: Boolean): Boolean = { + isFutureCancelled = true + true + } + } + } + fiber <- futureIO.cancelable.start + _ <- IO(assert(!isFutureCancelled)) + _ <- gate.get // wait for future to be created before canceling it + _ <- fiber.cancel + _ <- IO(assert(isFutureCancelled)) + } yield () + test.unsafeRunSync() + } + + } } diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index 6405c077e..4b88c5e72 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -26,20 +26,30 @@ final class AvroDeserializer[A] private[vulcan] ( case (deserializer, schemaRegistryClient) => Deserializer.instance { (topic, _, bytes) => F.defer { - val writerSchemaId = - ByteBuffer.wrap(bytes).getInt(1) // skip magic byte + if (bytes == null || bytes.length == 0) { + F.raiseError( + new IllegalArgumentException( + s"Invalid Avro record: bytes is null or empty" + ) + ) - val writerSchema = { - val schema = schemaRegistryClient.getSchemaById(writerSchemaId) - if (schema.isInstanceOf[AvroSchema]) - schema.asInstanceOf[AvroSchema].rawSchema() - else - null - } + } else { + val writerSchemaId = + ByteBuffer.wrap(bytes).getInt(1) // skip magic byte + + val writerSchema = { + val schema = schemaRegistryClient.getSchemaById(writerSchemaId) + if (schema.isInstanceOf[AvroSchema]) + schema.asInstanceOf[AvroSchema].rawSchema() + else + null + } - codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { - case Right(a) => F.pure(a) - case Left(error) => F.raiseError(error.throwable) + codec + .decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { + case Right(a) => F.pure(a) + case Left(error) => F.raiseError(error.throwable) + } } } } diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index fccbb9fd6..c8a3179dc 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -2,6 +2,7 @@ package 
fs2.kafka.vulcan import cats.effect.IO import cats.effect.unsafe.implicits.global +import fs2.kafka.Headers import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient import org.scalatest.funspec.AnyFunSpec import vulcan.{AvroError, Codec} @@ -31,6 +32,13 @@ final class AvroDeserializerSpec extends AnyFunSpec { assert(deserializer.forValue.attempt.unsafeRunSync().isLeft) } + it("raises IllegalArgumentException if the data is null") { + val deserializer = AvroDeserializer[String].using(avroSettings) + intercept[IllegalArgumentException] { + deserializer.forKey.flatMap(_.deserialize("foo", Headers.empty, null)).unsafeRunSync() + } + } + it("toString") { assert { avroDeserializer[Int].toString() startsWith "AvroDeserializer$" diff --git a/project/plugins.sbt b/project/plugins.sbt index a393ead11..7d5643f8c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,4 +5,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") -addSbtPlugin("com.codecommit" % "sbt-github-actions" % "0.14.2") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.12") From 667f7f9768cf430c3c1167664a0a3c838d97e43b Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 16 Jul 2022 22:48:54 +0100 Subject: [PATCH 093/162] Fix compile errors --- build.sbt | 7 ------- .../core/src/main/scala/fs2/kafka/ProducerSettings.scala | 1 - .../main/scala/fs2/kafka/internal/KafkaConsumerActor.scala | 4 ++-- .../core/src/main/scala/fs2/kafka/internal/syntax.scala | 4 +++- .../src/test/scala/fs2/kafka/internal/SyntaxSpec.scala | 3 --- 5 files changed, 5 insertions(+), 14 deletions(-) diff --git a/build.sbt b/build.sbt index 7747b0876..af6ad8e5d 100644 --- a/build.sbt +++ b/build.sbt @@ -260,13 +260,6 @@ lazy val publishSettings = ) ) -ThisBuild / mimaBinaryIssueFilters ++= { - import com.typesafe.tools.mima.core._ - // format: off - Seq.empty - // format: on -} - lazy val noMimaSettings = Seq(mimaPreviousArtifacts := Set()) lazy val noPublishSettings = diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index c0f30df93..e1eb3f0f1 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -7,7 +7,6 @@ package fs2.kafka import cats.{Applicative, Show} -import cats.syntax.all._ import fs2.kafka.security.KafkaCredentialStore import org.apache.kafka.clients.producer.ProducerConfig diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index dc345f9fa..d80b5def8 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -48,8 +48,8 @@ import scala.collection.immutable.SortedSet */ private[kafka] final class KafkaConsumerActor[F[_], K, V]( settings: ConsumerSettings[F, K, V], - keyDeserializer: Deserializer[F, K], - valueDeserializer: Deserializer[F, V], + keyDeserializer: KeyDeserializer[F, K], + valueDeserializer: ValueDeserializer[F, V], val ref: Ref[F, State[F, K, V]], requests: Queue[F, Request[F, K, V]], withConsumer: WithConsumer[F] diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala 
index 02d5969f5..49e5a447c 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -10,9 +10,11 @@ import cats.{FlatMap, Foldable, Show} import cats.effect.Async import cats.syntax.all._ import fs2.kafka.{Header, Headers, KafkaHeaders} + import scala.jdk.CollectionConverters._ import java.util import org.apache.kafka.common.KafkaFuture + import scala.collection.immutable.{ArraySeq, SortedSet} private[kafka] object syntax { @@ -162,7 +164,7 @@ private[kafka] object syntax { implicit final class KafkaFutureSyntax[F[_], A]( private val futureF: F[KafkaFuture[A]] ) extends AnyVal { - def cancelable[F[_]](implicit F: Async[F]): F[A] = + def cancelable(implicit F: Async[F]): F[A] = F.fromCompletableFuture(futureF.map(_.toCompletionStage.toCompletableFuture)) } diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index 865f96ac5..42d530bf2 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -7,12 +7,9 @@ import fs2.kafka.BaseSpec import fs2.kafka.internal.syntax._ import org.apache.kafka.common.KafkaFuture -import java.time.temporal.ChronoUnit.MICROS import org.apache.kafka.common.header.internals.RecordHeaders import org.apache.kafka.common.internals.KafkaFutureImpl -import scala.concurrent.duration._ - final class SyntaxSpec extends BaseSpec { describe("Map#filterKeysStrictValuesList") { From 200b480c1ef3f9ea2c01700a20ea0d9cca0c4124 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 16 Jul 2022 22:49:09 +0100 Subject: [PATCH 094/162] Reinstate optimised ProducerRecords constructor --- .../src/main/scala/fs2/kafka/package.scala | 20 +++---------------- 1 file changed, 3 insertions(+), 17 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 4390cb7e6..2dc7fcc96 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -91,29 +91,15 @@ package kafka { } package kafka { + import cats.Foldable + object ProducerRecords { def apply[F[+_], K, V]( records: F[ProducerRecord[K, V]] )( implicit F: Traverse[F] - ): ProducerRecords[K, V] = { - val numRecords = F.size(records).toInt - if (numRecords <= 1) { - F.get(records)(0) match { - case None => Chunk.empty[ProducerRecord[K, V]] - case Some(record) => Chunk.singleton(record) - } - } else { - val buf = new mutable.ArrayBuffer[ProducerRecord[K, V]](numRecords) - F.foldLeft(records, ()) { - case (_, record) => - buf += record - () - } - Chunk.array(buf.toArray) - } - } + ): ProducerRecords[K, V] = Chunk.iterable(Foldable[F].toIterable(records)) def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = Chunk.singleton(record) From 7585238689d89967aa2c67f0ae10b2c946d03903 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 16 Jul 2022 23:00:25 +0100 Subject: [PATCH 095/162] Fix build, update github workflow --- .github/workflows/ci.yml | 6 ++---- build.sbt | 2 -- project/plugins.sbt | 3 +-- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 64406467c..3596d1ace 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,11 +28,9 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.12.16, 2.13.8, 3.1.3] + scala: [2.13.8, 3.1.3] java: 
[temurin@8, temurin@17] exclude: - - scala: 2.12.16 - java: temurin@17 - scala: 3.1.3 java: temurin@17 runs-on: ${{ matrix.os }} @@ -171,4 +169,4 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt '++${{ matrix.scala }}' ci-release docs/docusaurusPublishGhpages + run: sbt '++${{ matrix.scala }}' ci-release diff --git a/build.sbt b/build.sbt index 6e2d8911a..ef868598e 100644 --- a/build.sbt +++ b/build.sbt @@ -18,8 +18,6 @@ val scala2 = "2.13.8" val scala3 = "3.1.3" -val scala3 = "3.1.3" - ThisBuild / tlBaseVersion := "3.0" lazy val `fs2-kafka` = project diff --git a/project/plugins.sbt b/project/plugins.sbt index 7d5643f8c..8a961e410 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,8 +1,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.12") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.13") From c6b8aaa9c710333bbc2642cebf1a79d8bacc9a63 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 16 Jul 2022 23:03:54 +0100 Subject: [PATCH 096/162] Remove unused import --- modules/core/src/main/scala/fs2/kafka/package.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 4390cb7e6..f96b5413e 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -10,7 +10,6 @@ import fs2.Chunk import cats.Traverse import cats.effect._ -import scala.collection.mutable import scala.concurrent.duration.FiniteDuration import org.apache.kafka.clients.producer.RecordMetadata From 5b8cabfbe8dde5e9723d02708a4105f945407a1d Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 18 Jul 2022 09:36:49 +0100 Subject: [PATCH 097/162] Remove unused import --- modules/core/src/main/scala/fs2/kafka/package.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 2dc7fcc96..d34c60ee2 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -10,7 +10,6 @@ import fs2.Chunk import cats.Traverse import cats.effect._ -import scala.collection.mutable import scala.concurrent.duration.FiniteDuration import org.apache.kafka.clients.producer.RecordMetadata From 1b958a48cbd48a444dc612cfa0d0cec47ca5ea64 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 21 Jul 2022 13:56:13 +0000 Subject: [PATCH 098/162] Update sbt to 1.7.1 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f6acff8b3..d738b858c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.6.2 +sbt.version = 1.7.1 From 38f894e7dcf64364bf01c56e79698cf7df4274a7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 4 Aug 2022 14:03:16 +0000 Subject: [PATCH 099/162] Update kafka-clients to 3.2.1 in series/3.x --- build.sbt | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d311ca195..29a735340 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.1.2" val fs2Version = "3.2.5" -val kafkaVersion = "3.1.1" +val kafkaVersion = "3.2.1" val testcontainersScalaVersion = "0.40.8" From 2d1e25ccf5dc249089b405395e7b73b3b0394dc2 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 13 Sep 2022 14:16:32 +0000 Subject: [PATCH 100/162] Update kafka-clients to 3.1.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d311ca195..d646d19a2 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.1.2" val fs2Version = "3.2.5" -val kafkaVersion = "3.1.1" +val kafkaVersion = "3.1.2" val testcontainersScalaVersion = "0.40.8" From 55dfc7d5e9eaf85b31275762faf5803e446a1e5c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Nov 2022 15:48:58 +0000 Subject: [PATCH 101/162] Update sbt-mima-plugin to 1.1.1 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8a961e410..27eea0140 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") From 7239529a34fa5b31e30186aa38f722f9df03da33 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Nov 2022 15:50:33 +0000 Subject: [PATCH 102/162] Update kafka-avro-serializer to 7.1.4 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d646d19a2..db0e3dae9 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.13" val catsVersion = "2.6.1" -val confluentVersion = "7.1.2" +val confluentVersion = "7.1.4" val fs2Version = "3.2.5" From 95e57b603b9a4d16446320325f55ed20eb81e995 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Nov 2022 15:50:59 +0000 Subject: [PATCH 103/162] Update kafka-clients to 3.3.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d646d19a2..0d2357199 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.1.2" val fs2Version = "3.2.5" -val kafkaVersion = "3.1.2" +val kafkaVersion = "3.3.1" val testcontainersScalaVersion = "0.40.8" From f1e1556ddf7167f249974af4db12c78133dd6dd9 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Nov 2022 15:51:58 +0000 Subject: [PATCH 104/162] Update sbt to 1.7.3 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index d738b858c..f5b9ea742 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.7.1 +sbt.version = 1.7.3 From cb40690faae213c9a550adf4bcdaa6bc7582e490 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Nov 2022 15:53:14 +0000 Subject: [PATCH 105/162] Update sbt-mdoc to 2.3.6 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt 
b/project/plugins.sbt index 8a961e410..87b3b8494 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,6 +2,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.6") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.13") From 99218b59de1d18fa8c44017aebc1687c4ee0a598 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 16 Nov 2022 18:45:39 +0000 Subject: [PATCH 106/162] Update scala3-library to 3.2.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d646d19a2..22199b51f 100644 --- a/build.sbt +++ b/build.sbt @@ -16,7 +16,7 @@ val munitVersion = "0.7.29" val scala2 = "2.13.8" -val scala3 = "3.1.3" +val scala3 = "3.2.1" ThisBuild / tlBaseVersion := "3.0" From 989d8c0a2f546c1103653e37391682fe757779f4 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 16 Nov 2022 18:46:38 +0000 Subject: [PATCH 107/162] Regenerate GitHub Actions workflow Executed command: sbt githubWorkflowGenerate --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e173241a5..ad726f5f5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,10 +28,10 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.1.3] + scala: [2.13.8, 3.2.1] java: [temurin@8, temurin@17] exclude: - - scala: 3.1.3 + - scala: 3.2.1 java: temurin@17 runs-on: ${{ matrix.os }} steps: From 85198e45ae7501da63e7d8ca6ffe77b68a866c9d Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 16 Nov 2022 18:47:01 +0000 Subject: [PATCH 108/162] Update sbt-typelevel to 0.4.17 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8a961e410..53ec7b034 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.2") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.13") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.17") From 7f248b4bb857a41e8f920a65baf51607e5def50e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 16 Nov 2022 18:48:44 +0000 Subject: [PATCH 109/162] Run prePR with sbt-typelevel Executed command: sbt tlPrePrBotHook --- .github/workflows/ci.yml | 8 +++--- .../test/scala/cats/tests/CatsEquality.scala | 23 ++------------- .../src/test/scala/cats/tests/CatsSuite.scala | 23 ++------------- .../src/test/scala/fs2/kafka/AcksSpec.scala | 6 ++++ .../fs2/kafka/AdminClientSettingsSpec.scala | 6 ++++ .../scala/fs2/kafka/AutoOffsetResetSpec.scala | 6 ++++ .../test/scala/fs2/kafka/BaseAsyncSpec.scala | 6 ++++ .../test/scala/fs2/kafka/BaseCatsSpec.scala | 6 ++++ .../test/scala/fs2/kafka/BaseGenerators.scala | 6 ++++ .../test/scala/fs2/kafka/BaseKafkaSpec.scala | 28 +++---------------- .../src/test/scala/fs2/kafka/BaseSpec.scala | 6 ++++ .../scala/fs2/kafka/CommitRecoverySpec.scala | 6 ++++ .../kafka/CommitTimeoutExceptionSpec.scala | 6 ++++ 
.../CommittableConsumerRecordLawsSpec.scala | 6 ++++ .../kafka/CommittableConsumerRecordSpec.scala | 6 ++++ .../kafka/CommittableOffsetBatchSpec.scala | 6 ++++ .../fs2/kafka/CommittableOffsetLawsSpec.scala | 6 ++++ .../fs2/kafka/CommittableOffsetSpec.scala | 6 ++++ .../CommittableProducerRecordsLawsSpec.scala | 6 ++++ .../fs2/kafka/ConsumerRecordLawsSpec.scala | 6 ++++ .../scala/fs2/kafka/ConsumerRecordSpec.scala | 6 ++++ .../fs2/kafka/ConsumerSettingsSpec.scala | 6 ++++ .../kafka/ConsumerShutdownExceptionSpec.scala | 6 ++++ .../kafka/DeserializationExceptionSpec.scala | 6 ++++ .../scala/fs2/kafka/DeserializerSpec.scala | 6 ++++ .../fs2/kafka/HeaderDeserializerSpec.scala | 6 ++++ .../test/scala/fs2/kafka/HeaderLawsSpec.scala | 6 ++++ .../fs2/kafka/HeaderSerializerSpec.scala | 6 ++++ .../src/test/scala/fs2/kafka/HeaderSpec.scala | 6 ++++ .../scala/fs2/kafka/HeadersLawsSpec.scala | 6 ++++ .../test/scala/fs2/kafka/HeadersSpec.scala | 6 ++++ .../scala/fs2/kafka/IsolationLevelSpec.scala | 6 ++++ .../src/test/scala/fs2/kafka/JitterSpec.scala | 6 ++++ .../fs2/kafka/KafkaAdminClientSpec.scala | 6 ++++ .../scala/fs2/kafka/KafkaConsumerSpec.scala | 6 ++++ .../kafka/KafkaProducerConnectionSpec.scala | 6 ++++ .../scala/fs2/kafka/KafkaProducerSpec.scala | 6 ++++ .../src/test/scala/fs2/kafka/KafkaSpec.scala | 6 ++++ .../fs2/kafka/ProducerRecordLawsSpec.scala | 6 ++++ .../fs2/kafka/ProducerSettingsSpec.scala | 6 ++++ .../kafka/SerializationExceptionSpec.scala | 6 ++++ .../test/scala/fs2/kafka/SerializerSpec.scala | 6 ++++ .../scala/fs2/kafka/TimestampLawsSpec.scala | 6 ++++ .../test/scala/fs2/kafka/TimestampSpec.scala | 6 ++++ .../TransactionalKafkaProducerSpec.scala | 6 ++++ .../kafka/UnexpectedTopicExceptionSpec.scala | 6 ++++ .../scala/fs2/kafka/internal/SyntaxSpec.scala | 6 ++++ .../security/KafkaCredentialStoreSpec.scala | 6 ++++ .../scala/fs2/kafka/vulcan/AuthSpec.scala | 6 ++++ .../kafka/vulcan/AvroDeserializerSpec.scala | 6 ++++ .../fs2/kafka/vulcan/AvroSerializerSpec.scala | 6 ++++ .../fs2/kafka/vulcan/AvroSettingsSpec.scala | 6 ++++ .../scala/fs2/kafka/vulcan/PackageSpec.scala | 6 ++++ .../SchemaRegistryClientSettingsSpec.scala | 6 ++++ 54 files changed, 312 insertions(+), 70 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e173241a5..da4f484c6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -85,12 +85,12 @@ jobs: key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} - name: Check that workflows are up to date - run: sbt '++${{ matrix.scala }}' 'project /' githubWorkflowCheck + run: sbt githubWorkflowCheck - - run: sbt '++${{ matrix.scala }}' ci + - run: sbt '++ ${{ matrix.scala }}' ci - if: matrix.scala == '2.13.8' - run: sbt '++${{ matrix.scala }}' docs/run + run: sbt '++ ${{ matrix.scala }}' docs/run publish: name: Publish Artifacts @@ -169,4 +169,4 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt '++${{ matrix.scala }}' tlRelease + run: sbt '++ ${{ matrix.scala }}' tlRelease diff --git a/modules/core/src/test/scala/cats/tests/CatsEquality.scala b/modules/core/src/test/scala/cats/tests/CatsEquality.scala index 9d98e2377..5c92cca13 100644 --- a/modules/core/src/test/scala/cats/tests/CatsEquality.scala +++ b/modules/core/src/test/scala/cats/tests/CatsEquality.scala @@ -1,26 +1,7 @@ /* - * Copied verbatim from cats-testkit-scalatest 
(https://github.com/typelevel/cats-testkit-scalatest) - * which is licensed as follows: + * Copyright 2018-2022 OVO Energy Limited * - * Copyright (c) 2019 Typelevel - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. + * SPDX-License-Identifier: Apache-2.0 */ package cats diff --git a/modules/core/src/test/scala/cats/tests/CatsSuite.scala b/modules/core/src/test/scala/cats/tests/CatsSuite.scala index 2f39481dd..1b8bac2f1 100644 --- a/modules/core/src/test/scala/cats/tests/CatsSuite.scala +++ b/modules/core/src/test/scala/cats/tests/CatsSuite.scala @@ -1,26 +1,7 @@ /* - * Copied verbatim from cats-testkit-scalatest (https://github.com/typelevel/cats-testkit-scalatest) - * which is licensed as follows: + * Copyright 2018-2022 OVO Energy Limited * - * Copyright (c) 2019 Typelevel - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
+ * SPDX-License-Identifier: Apache-2.0 */ package cats diff --git a/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala b/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala index 4d5836c07..61d4d020b 100644 --- a/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class AcksSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala index 1a5f35a0e..721199f0a 100644 --- a/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.syntax.all._ diff --git a/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala b/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala index 87cbe0a54..f4416368c 100644 --- a/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class AutoOffsetResetSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala index 2a97ec92b..0fe22441c 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import org.scalatest.Assertions diff --git a/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala index 7e8d96bcc..91bd7edf0 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats._ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala b/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala index e048d3e47..5421133f5 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.{ApplicativeError, ApplicativeThrow} diff --git a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala index 89a9c6399..61cd13fd1 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala @@ -1,29 +1,9 @@ /* -This file contains code derived from the Embedded Kafka library -(https://github.com/embeddedkafka/embedded-kafka), the license for which is reproduced below. 
- - The MIT License (MIT) - - Copyright (c) 2016 Emanuele Blanco - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 */ + package fs2.kafka import cats.effect.Sync diff --git a/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala index ff910751c..6ca4cab77 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import org.scalatest._ diff --git a/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala b/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala index bf9b7e64f..052654f04 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.data.Chain diff --git a/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala index 2204faa51..b521540fb 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import org.apache.kafka.clients.consumer.OffsetAndMetadata diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala index c9f792fb7..aad5d48fc 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala index ac19b0f36..c8eb5c20c 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala +++ 
b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala index f9efccaa2..151393cff 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala index 2e7594c8b..d3a7d6f9f 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala index fa87db323..7ba74d124 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala index 8e416a65e..2e9da26ff 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala index 2d978d9de..167e20315 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.kernel.laws.discipline.EqTests diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala index bec6008da..a967f17a2 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala index cb5e96107..0bd2a4e19 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala @@ -1,3 +1,9 @@ +/* + * 
Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala index ca5cebd94..b4039b21f 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class ConsumerShutdownExceptionSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala index 1bb982e09..27c6c5b66 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class DeserializationExceptionSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala index 573438231..acdd4d869 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.Eq diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala index c0346b341..72a6a657e 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.laws.discipline._ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala index 9099f7774..83ef1954c 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.kernel.laws.discipline.EqTests diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala index 6dec9dfc6..76cbc506e 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats._ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala index b1a03bdb8..603bde609 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.syntax.show._ diff 
--git a/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala index 2a4a409a8..297766e92 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.kernel.laws.discipline.EqTests diff --git a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala index 74c22fe2c..4a04bdaef 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.data.Chain diff --git a/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala b/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala index e11eebd56..8cb2b1948 100644 --- a/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class IsolationLevelSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala b/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala index 6d9ef1f28..86540366a 100644 --- a/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala index 88d8f8e86..7be58e0d8 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.{IO, SyncIO} diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala index 1ea125f02..71f27bcd9 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.data.NonEmptySet diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala index dc33490f3..9d4f7a9ff 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 861b9aec7..14faf8fc3 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ 
b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.syntax.all._ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala index 2cad6ee29..5fcdf41b0 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.ApplicativeError diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala index d9e377a3d..30093582f 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.kernel.laws.discipline.EqTests diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala index 2eca0e1c3..713a81972 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.effect.IO diff --git a/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala index 155f71e34..1ffad0c2b 100644 --- a/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class SerializationExceptionSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala index c1d3b823c..80c0d042f 100644 --- a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats._ diff --git a/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala index 2afbb30bd..b4faf919e 100644 --- a/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.kernel.laws.discipline.EqTests diff --git a/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala b/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala index 38df3ba6e..ab4ebb81e 100644 --- a/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import cats.syntax.show._ 
diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index e52f07e91..e39e60cce 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka import java.util diff --git a/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala index 0245eebb3..269476fbd 100644 --- a/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka final class UnexpectedTopicExceptionSpec extends BaseSpec { diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index e9549cc96..25bbe71eb 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.internal import cats.effect.unsafe.implicits.global diff --git a/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala b/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala index 02f2813f9..a66e0f5f5 100644 --- a/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.security import fs2.kafka.BaseSpec diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala index 1f5c244ac..72317a053 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.vulcan import org.scalatest.funspec.AnyFunSpec diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index c8a3179dc..238f789c5 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.vulcan import cats.effect.IO diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala index cf2ff288d..1d6ad27aa 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: 
Apache-2.0 + */ + package fs2.kafka.vulcan import cats.effect.IO diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala index 35d041a45..cb3abe8c1 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.vulcan import cats.effect.IO diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index e8a9bcd8a..dacab63a6 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.vulcan import java.time.Instant diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala index e9ed8a34b..cf48f0686 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala @@ -1,3 +1,9 @@ +/* + * Copyright 2018-2022 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + package fs2.kafka.vulcan import cats.effect.IO From 454e896944edecc66a87028e76003c8da6a7f915 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Sat, 3 Dec 2022 11:44:49 +0000 Subject: [PATCH 110/162] scala-steward (3.x series): ignore dependencies that should be merged forward from 2.x series --- .scala-steward.conf | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/.scala-steward.conf b/.scala-steward.conf index bb5164c92..3bc97b2e1 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -14,6 +14,24 @@ updates.ignore = [{ artifactId="fs2-core" },{ groupId = "com.dimafeng" +},{ + groupId = "org.scalameta", + artifactId = "sbt-scalafmt" +},{ + groupId = "de.heikoseeberger", + artifactId = "sbt-header" +},{ + groupId = "org.scala-lang", + artifactId = "scala-library" +},{ + groupId = "org.scala-lang", + artifactId = "scala3-library" +},{ + groupId = "ch.qos.logback", + artifactId = "logback-classic" +},{ + groupId = "org.typelevel", + artifactId = "discipline-scalatest" }] updates.pin = [{ From d95988ba14a959600506fa107f20eda0a5f2053a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 8 Dec 2022 16:36:58 +0000 Subject: [PATCH 111/162] Update kafka-avro-serializer to 7.3.0 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 45070889a..45fa2ef3d 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.13" val catsVersion = "2.6.1" -val confluentVersion = "7.1.4" +val confluentVersion = "7.3.0" val fs2Version = "3.2.5" From c5b5bc87ec9d748dfb86bd858955856c1276a610 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 8 Dec 2022 16:37:25 +0000 Subject: [PATCH 112/162] Update kafka-clients to 3.2.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 45070889a..32c2155c0 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.1.4" val 
fs2Version = "3.2.5" -val kafkaVersion = "3.2.1" +val kafkaVersion = "3.2.3" val testcontainersScalaVersion = "0.40.8" From 21307158da51ff92a5975b974eaa1ebee28d717a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 7 Jan 2023 18:12:30 +0000 Subject: [PATCH 113/162] Update sbt to 1.8.2 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f5b9ea742..f344c1483 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.7.3 +sbt.version = 1.8.2 From 923fb05b8760c08426204f4817daba83984f81cc Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 29 Jan 2023 16:50:29 +0000 Subject: [PATCH 114/162] Update sbt-mdoc to 2.3.7 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index bb2001773..e741909d5 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,6 +2,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") -addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.6") +addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.17") From a1f5ff568d411a04847e85b4dc81f83f232cdf36 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 24 Feb 2023 06:41:37 +0000 Subject: [PATCH 115/162] Update kafka-avro-serializer to 7.1.6 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 45070889a..979c6acbe 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.3.13" val catsVersion = "2.6.1" -val confluentVersion = "7.1.4" +val confluentVersion = "7.1.6" val fs2Version = "3.2.5" From 9e46f056bf7302ee933646bbb411327401e679a8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 10 Mar 2023 15:23:04 +0000 Subject: [PATCH 116/162] Update sbt-typelevel to 0.4.19 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index bb2001773..7ea55ef06 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.7.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.6") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.17") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.19") From 581dcb423863434d23b03397e3e0a9d39c90b4fb Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 10 Mar 2023 15:24:31 +0000 Subject: [PATCH 117/162] Run prePR with sbt-typelevel Executed command: sbt tlPrePrBotHook --- .github/workflows/ci.yml | 24 +++++++++---------- .../core/src/main/scala/fs2/kafka/Acks.scala | 2 +- .../scala/fs2/kafka/AdminClientSettings.scala | 2 +- .../scala/fs2/kafka/AutoOffsetReset.scala | 2 +- .../main/scala/fs2/kafka/CommitRecovery.scala | 2 +- .../fs2/kafka/CommitRecoveryException.scala | 2 +- .../fs2/kafka/CommitTimeoutException.scala | 2 +- .../fs2/kafka/CommittableConsumerRecord.scala | 2 +- .../scala/fs2/kafka/CommittableOffset.scala | 2 +- .../fs2/kafka/CommittableOffsetBatch.scala | 2 +- 
.../kafka/CommittableProducerRecords.scala | 2 +- .../fs2/kafka/ConsumerGroupException.scala | 2 +- .../main/scala/fs2/kafka/ConsumerRecord.scala | 2 +- .../scala/fs2/kafka/ConsumerSettings.scala | 2 +- .../fs2/kafka/ConsumerShutdownException.scala | 2 +- .../fs2/kafka/DeserializationException.scala | 2 +- .../main/scala/fs2/kafka/Deserializer.scala | 2 +- .../src/main/scala/fs2/kafka/Header.scala | 2 +- .../scala/fs2/kafka/HeaderDeserializer.scala | 2 +- .../scala/fs2/kafka/HeaderSerializer.scala | 2 +- .../src/main/scala/fs2/kafka/Headers.scala | 2 +- .../main/scala/fs2/kafka/IsolationLevel.scala | 2 +- .../src/main/scala/fs2/kafka/Jitter.scala | 2 +- .../scala/fs2/kafka/KafkaAdminClient.scala | 2 +- .../main/scala/fs2/kafka/KafkaConsumer.scala | 2 +- .../main/scala/fs2/kafka/KafkaProducer.scala | 2 +- .../fs2/kafka/KafkaProducerConnection.scala | 2 +- .../fs2/kafka/NotSubscribedException.scala | 2 +- .../main/scala/fs2/kafka/ProducerRecord.scala | 2 +- .../scala/fs2/kafka/ProducerSettings.scala | 2 +- .../scala/fs2/kafka/RecordDeserializer.scala | 2 +- .../scala/fs2/kafka/RecordSerializer.scala | 2 +- .../fs2/kafka/SerializationException.scala | 2 +- .../src/main/scala/fs2/kafka/Serializer.scala | 2 +- .../src/main/scala/fs2/kafka/Timestamp.scala | 2 +- .../kafka/TransactionalKafkaProducer.scala | 2 +- .../kafka/TransactionalProducerSettings.scala | 2 +- .../fs2/kafka/UnexpectedTopicException.scala | 2 +- .../scala/fs2/kafka/admin/MkAdminClient.scala | 2 +- .../fs2/kafka/consumer/KafkaAssignment.scala | 2 +- .../fs2/kafka/consumer/KafkaCommit.scala | 2 +- .../fs2/kafka/consumer/KafkaConsume.scala | 2 +- .../consumer/KafkaConsumerLifecycle.scala | 2 +- .../fs2/kafka/consumer/KafkaMetrics.scala | 2 +- .../fs2/kafka/consumer/KafkaOffsets.scala | 2 +- .../fs2/kafka/consumer/KafkaOffsetsV2.scala | 2 +- .../kafka/consumer/KafkaSubscription.scala | 2 +- .../fs2/kafka/consumer/KafkaTopics.scala | 2 +- .../scala/fs2/kafka/consumer/MkConsumer.scala | 2 +- .../src/main/scala/fs2/kafka/instances.scala | 2 +- .../scala/fs2/kafka/internal/Blocking.scala | 2 +- .../scala/fs2/kafka/internal/FakeFiber.scala | 2 +- .../kafka/internal/KafkaConsumerActor.scala | 2 +- .../scala/fs2/kafka/internal/LogEntry.scala | 2 +- .../scala/fs2/kafka/internal/LogLevel.scala | 2 +- .../scala/fs2/kafka/internal/Logging.scala | 2 +- .../fs2/kafka/internal/WithAdminClient.scala | 2 +- .../fs2/kafka/internal/WithConsumer.scala | 2 +- .../fs2/kafka/internal/WithProducer.scala | 2 +- .../internal/WithTransactionalProducer.scala | 2 +- .../scala/fs2/kafka/internal/package.scala | 2 +- .../scala/fs2/kafka/internal/syntax.scala | 2 +- .../src/main/scala/fs2/kafka/package.scala | 2 +- .../scala/fs2/kafka/producer/MkProducer.scala | 2 +- .../kafka/security/KafkaCredentialStore.scala | 2 +- .../test/scala/cats/tests/CatsEquality.scala | 2 +- .../src/test/scala/cats/tests/CatsSuite.scala | 2 +- .../src/test/scala/fs2/kafka/AcksSpec.scala | 2 +- .../fs2/kafka/AdminClientSettingsSpec.scala | 2 +- .../scala/fs2/kafka/AutoOffsetResetSpec.scala | 2 +- .../test/scala/fs2/kafka/BaseAsyncSpec.scala | 2 +- .../test/scala/fs2/kafka/BaseCatsSpec.scala | 2 +- .../test/scala/fs2/kafka/BaseGenerators.scala | 2 +- .../test/scala/fs2/kafka/BaseKafkaSpec.scala | 2 +- .../src/test/scala/fs2/kafka/BaseSpec.scala | 2 +- .../scala/fs2/kafka/CommitRecoverySpec.scala | 2 +- .../kafka/CommitTimeoutExceptionSpec.scala | 2 +- .../CommittableConsumerRecordLawsSpec.scala | 2 +- .../kafka/CommittableConsumerRecordSpec.scala | 2 +- 
.../kafka/CommittableOffsetBatchSpec.scala | 2 +- .../fs2/kafka/CommittableOffsetLawsSpec.scala | 2 +- .../fs2/kafka/CommittableOffsetSpec.scala | 2 +- .../CommittableProducerRecordsLawsSpec.scala | 2 +- .../fs2/kafka/ConsumerRecordLawsSpec.scala | 2 +- .../scala/fs2/kafka/ConsumerRecordSpec.scala | 2 +- .../fs2/kafka/ConsumerSettingsSpec.scala | 2 +- .../kafka/ConsumerShutdownExceptionSpec.scala | 2 +- .../kafka/DeserializationExceptionSpec.scala | 2 +- .../scala/fs2/kafka/DeserializerSpec.scala | 2 +- .../fs2/kafka/HeaderDeserializerSpec.scala | 2 +- .../test/scala/fs2/kafka/HeaderLawsSpec.scala | 2 +- .../fs2/kafka/HeaderSerializerSpec.scala | 2 +- .../src/test/scala/fs2/kafka/HeaderSpec.scala | 2 +- .../scala/fs2/kafka/HeadersLawsSpec.scala | 2 +- .../test/scala/fs2/kafka/HeadersSpec.scala | 2 +- .../scala/fs2/kafka/IsolationLevelSpec.scala | 2 +- .../src/test/scala/fs2/kafka/JitterSpec.scala | 2 +- .../fs2/kafka/KafkaAdminClientSpec.scala | 2 +- .../scala/fs2/kafka/KafkaConsumerSpec.scala | 2 +- .../kafka/KafkaProducerConnectionSpec.scala | 2 +- .../scala/fs2/kafka/KafkaProducerSpec.scala | 2 +- .../src/test/scala/fs2/kafka/KafkaSpec.scala | 2 +- .../fs2/kafka/ProducerRecordLawsSpec.scala | 2 +- .../fs2/kafka/ProducerSettingsSpec.scala | 2 +- .../kafka/SerializationExceptionSpec.scala | 2 +- .../test/scala/fs2/kafka/SerializerSpec.scala | 2 +- .../scala/fs2/kafka/TimestampLawsSpec.scala | 2 +- .../test/scala/fs2/kafka/TimestampSpec.scala | 2 +- .../TransactionalKafkaProducerSpec.scala | 2 +- .../kafka/UnexpectedTopicExceptionSpec.scala | 2 +- .../scala/fs2/kafka/internal/SyntaxSpec.scala | 2 +- .../security/KafkaCredentialStoreSpec.scala | 2 +- .../kafka/vulcan/testkit/SchemaSuite.scala | 2 +- .../kafka.util/VerifiableProperties.scala | 2 +- .../main/scala/fs2/kafka/vulcan/Auth.scala | 2 +- .../fs2/kafka/vulcan/AvroDeserializer.scala | 2 +- .../fs2/kafka/vulcan/AvroSerializer.scala | 2 +- .../scala/fs2/kafka/vulcan/AvroSettings.scala | 2 +- .../vulcan/SchemaRegistryClientSettings.scala | 2 +- .../main/scala/fs2/kafka/vulcan/package.scala | 2 +- .../scala/fs2/kafka/vulcan/AuthSpec.scala | 2 +- .../kafka/vulcan/AvroDeserializerSpec.scala | 2 +- .../fs2/kafka/vulcan/AvroSerializerSpec.scala | 2 +- .../fs2/kafka/vulcan/AvroSettingsSpec.scala | 2 +- .../scala/fs2/kafka/vulcan/PackageSpec.scala | 2 +- .../SchemaRegistryClientSettingsSpec.scala | 2 +- 126 files changed, 137 insertions(+), 137 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f2094b170..3505f1dab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,21 +36,21 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout current branch (full) - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Download Java (temurin@8) id: download-java-temurin-8 if: matrix.java == 'temurin@8' - uses: typelevel/download-java@v1 + uses: typelevel/download-java@v2 with: distribution: temurin java-version: 8 - name: Setup Java (temurin@8) if: matrix.java == 'temurin@8' - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: jdkfile java-version: 8 @@ -59,21 +59,21 @@ jobs: - name: Download Java (temurin@17) id: download-java-temurin-17 if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v1 + uses: typelevel/download-java@v2 with: distribution: temurin java-version: 17 - name: Setup Java (temurin@17) if: matrix.java == 'temurin@17' - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: jdkfile 
java-version: 17 jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} - name: Cache sbt - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.sbt @@ -104,21 +104,21 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout current branch (full) - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - name: Download Java (temurin@8) id: download-java-temurin-8 if: matrix.java == 'temurin@8' - uses: typelevel/download-java@v1 + uses: typelevel/download-java@v2 with: distribution: temurin java-version: 8 - name: Setup Java (temurin@8) if: matrix.java == 'temurin@8' - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: jdkfile java-version: 8 @@ -127,21 +127,21 @@ jobs: - name: Download Java (temurin@17) id: download-java-temurin-17 if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v1 + uses: typelevel/download-java@v2 with: distribution: temurin java-version: 17 - name: Setup Java (temurin@17) if: matrix.java == 'temurin@17' - uses: actions/setup-java@v2 + uses: actions/setup-java@v3 with: distribution: jdkfile java-version: 17 jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} - name: Cache sbt - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.sbt diff --git a/modules/core/src/main/scala/fs2/kafka/Acks.scala b/modules/core/src/main/scala/fs2/kafka/Acks.scala index 2a0605d85..12481c20e 100644 --- a/modules/core/src/main/scala/fs2/kafka/Acks.scala +++ b/modules/core/src/main/scala/fs2/kafka/Acks.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/AdminClientSettings.scala b/modules/core/src/main/scala/fs2/kafka/AdminClientSettings.scala index 1fe20305d..49f336d63 100644 --- a/modules/core/src/main/scala/fs2/kafka/AdminClientSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/AdminClientSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/AutoOffsetReset.scala b/modules/core/src/main/scala/fs2/kafka/AutoOffsetReset.scala index 8705e4828..632a90a80 100644 --- a/modules/core/src/main/scala/fs2/kafka/AutoOffsetReset.scala +++ b/modules/core/src/main/scala/fs2/kafka/AutoOffsetReset.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommitRecovery.scala b/modules/core/src/main/scala/fs2/kafka/CommitRecovery.scala index f0b6b9f91..6d5a21138 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommitRecovery.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommitRecovery.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommitRecoveryException.scala b/modules/core/src/main/scala/fs2/kafka/CommitRecoveryException.scala index c46067c37..85d2d84de 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommitRecoveryException.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommitRecoveryException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git 
a/modules/core/src/main/scala/fs2/kafka/CommitTimeoutException.scala b/modules/core/src/main/scala/fs2/kafka/CommitTimeoutException.scala index 0b43990da..9e388bbb6 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommitTimeoutException.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommitTimeoutException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableConsumerRecord.scala b/modules/core/src/main/scala/fs2/kafka/CommittableConsumerRecord.scala index b82913039..4b7f69d71 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableConsumerRecord.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableConsumerRecord.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableOffset.scala b/modules/core/src/main/scala/fs2/kafka/CommittableOffset.scala index aca1d7e93..9412aad76 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableOffset.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableOffset.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableOffsetBatch.scala b/modules/core/src/main/scala/fs2/kafka/CommittableOffsetBatch.scala index dca095c93..e0e3600c3 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableOffsetBatch.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableOffsetBatch.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala index dcfff51b2..b31a3291a 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerGroupException.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerGroupException.scala index 87e8790f1..c6e671a10 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerGroupException.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerGroupException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala index 5353f498c..9294deb83 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerRecord.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index d5a08c811..c7c313d2b 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ 
b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerShutdownException.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerShutdownException.scala index a4e2665f7..0f722939a 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerShutdownException.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerShutdownException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/DeserializationException.scala b/modules/core/src/main/scala/fs2/kafka/DeserializationException.scala index 3b296dd96..bf6489bcf 100644 --- a/modules/core/src/main/scala/fs2/kafka/DeserializationException.scala +++ b/modules/core/src/main/scala/fs2/kafka/DeserializationException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index 88a7638a5..b2d5faf5f 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Header.scala b/modules/core/src/main/scala/fs2/kafka/Header.scala index e8d89bad9..35e5ba241 100644 --- a/modules/core/src/main/scala/fs2/kafka/Header.scala +++ b/modules/core/src/main/scala/fs2/kafka/Header.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/HeaderDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/HeaderDeserializer.scala index 114f94b57..50592b4d3 100644 --- a/modules/core/src/main/scala/fs2/kafka/HeaderDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/HeaderDeserializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/HeaderSerializer.scala b/modules/core/src/main/scala/fs2/kafka/HeaderSerializer.scala index 5c2fbd8d8..ab4d8152f 100644 --- a/modules/core/src/main/scala/fs2/kafka/HeaderSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/HeaderSerializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Headers.scala b/modules/core/src/main/scala/fs2/kafka/Headers.scala index 85ae65b48..21ff77696 100644 --- a/modules/core/src/main/scala/fs2/kafka/Headers.scala +++ b/modules/core/src/main/scala/fs2/kafka/Headers.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/IsolationLevel.scala b/modules/core/src/main/scala/fs2/kafka/IsolationLevel.scala index 18670be77..c234347c0 100644 --- 
a/modules/core/src/main/scala/fs2/kafka/IsolationLevel.scala +++ b/modules/core/src/main/scala/fs2/kafka/IsolationLevel.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Jitter.scala b/modules/core/src/main/scala/fs2/kafka/Jitter.scala index d18ab9d03..fde01b6e1 100644 --- a/modules/core/src/main/scala/fs2/kafka/Jitter.scala +++ b/modules/core/src/main/scala/fs2/kafka/Jitter.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index ef4d0e059..7877af518 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index bd7e89d24..3740b587d 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index 185f68ea0..0dd283e2f 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index b7d948b78..c74c9a5f6 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/NotSubscribedException.scala b/modules/core/src/main/scala/fs2/kafka/NotSubscribedException.scala index 7f0e2cb11..67104eaab 100644 --- a/modules/core/src/main/scala/fs2/kafka/NotSubscribedException.scala +++ b/modules/core/src/main/scala/fs2/kafka/NotSubscribedException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerRecord.scala b/modules/core/src/main/scala/fs2/kafka/ProducerRecord.scala index 2bd45c799..db9e27a19 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerRecord.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerRecord.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala 
b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index e1eb3f0f1..01b298858 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala index 652194947..39779d8b7 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala index e961132cd..8f843cc7d 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/SerializationException.scala b/modules/core/src/main/scala/fs2/kafka/SerializationException.scala index afa279c14..80b415648 100644 --- a/modules/core/src/main/scala/fs2/kafka/SerializationException.scala +++ b/modules/core/src/main/scala/fs2/kafka/SerializationException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 922b22425..6b834c6bf 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/Timestamp.scala b/modules/core/src/main/scala/fs2/kafka/Timestamp.scala index d66ec3aeb..a622d82ad 100644 --- a/modules/core/src/main/scala/fs2/kafka/Timestamp.scala +++ b/modules/core/src/main/scala/fs2/kafka/Timestamp.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 65cbb68b8..2e271f753 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerSettings.scala index aa5c132e2..d970e570f 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalProducerSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 
2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/UnexpectedTopicException.scala b/modules/core/src/main/scala/fs2/kafka/UnexpectedTopicException.scala index 05723cd29..d037ba4b3 100644 --- a/modules/core/src/main/scala/fs2/kafka/UnexpectedTopicException.scala +++ b/modules/core/src/main/scala/fs2/kafka/UnexpectedTopicException.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala index 226eddb24..6bb02cb90 100644 --- a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaAssignment.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaAssignment.scala index 5ecdce588..21695d9ea 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaAssignment.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaAssignment.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaCommit.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaCommit.scala index 0f821394d..4e6eef75b 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaCommit.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaCommit.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsume.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsume.scala index cccd56347..8994c1799 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsume.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsume.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsumerLifecycle.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsumerLifecycle.scala index 09ff210ce..7dba47032 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsumerLifecycle.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaConsumerLifecycle.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaMetrics.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaMetrics.scala index 5fe271dce..8316f685c 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaMetrics.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaMetrics.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsets.scala 
b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsets.scala index c20e47050..a586c796d 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsets.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsets.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsetsV2.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsetsV2.scala index bed064202..7488b817b 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsetsV2.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaOffsetsV2.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaSubscription.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaSubscription.scala index d94fb6e40..e77de6434 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaSubscription.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaSubscription.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaTopics.scala b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaTopics.scala index 79f611247..b31bfcc3f 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/KafkaTopics.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/KafkaTopics.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala index 21c21e5f4..b5c84e325 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/instances.scala b/modules/core/src/main/scala/fs2/kafka/instances.scala index e740ac72e..24e6a52fb 100644 --- a/modules/core/src/main/scala/fs2/kafka/instances.scala +++ b/modules/core/src/main/scala/fs2/kafka/instances.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/Blocking.scala b/modules/core/src/main/scala/fs2/kafka/internal/Blocking.scala index 2c005d287..7d63d9832 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/Blocking.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/Blocking.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala b/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala index e55a0ee9e..7e3d907c9 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy 
Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index d80b5def8..96fe35618 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala b/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala index c0b41726d..b2973c73e 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/LogEntry.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/LogLevel.scala b/modules/core/src/main/scala/fs2/kafka/internal/LogLevel.scala index 87f1c2100..5870956f4 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/LogLevel.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/LogLevel.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/Logging.scala b/modules/core/src/main/scala/fs2/kafka/internal/Logging.scala index 362b8ca41..95d371e8e 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/Logging.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/Logging.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala index 7d5dcde99..30ae6ce93 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala index d111e1c00..880105a12 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala index 1b10dddc4..ca4abf572 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala 
b/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala index 42b240c93..d9c26a48f 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/package.scala b/modules/core/src/main/scala/fs2/kafka/internal/package.scala index 47633f0d5..0a4d999e0 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/package.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index 49e5a447c..5d71038a1 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index d34c60ee2..5281fb96a 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala index bd24bffa4..05caa9e74 100644 --- a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/main/scala/fs2/kafka/security/KafkaCredentialStore.scala b/modules/core/src/main/scala/fs2/kafka/security/KafkaCredentialStore.scala index bddf99208..e882f6b25 100644 --- a/modules/core/src/main/scala/fs2/kafka/security/KafkaCredentialStore.scala +++ b/modules/core/src/main/scala/fs2/kafka/security/KafkaCredentialStore.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/cats/tests/CatsEquality.scala b/modules/core/src/test/scala/cats/tests/CatsEquality.scala index 5c92cca13..608d01932 100644 --- a/modules/core/src/test/scala/cats/tests/CatsEquality.scala +++ b/modules/core/src/test/scala/cats/tests/CatsEquality.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/cats/tests/CatsSuite.scala b/modules/core/src/test/scala/cats/tests/CatsSuite.scala index 1b8bac2f1..c6cf71602 100644 --- a/modules/core/src/test/scala/cats/tests/CatsSuite.scala +++ b/modules/core/src/test/scala/cats/tests/CatsSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * 
SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala b/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala index 61d4d020b..a7afcf0e8 100644 --- a/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AcksSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala index 721199f0a..229be0c21 100644 --- a/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AdminClientSettingsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala b/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala index f4416368c..9942c944e 100644 --- a/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/AutoOffsetResetSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala index 0fe22441c..4152244c9 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseAsyncSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala index 91bd7edf0..50a29eb0a 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseCatsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala b/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala index 5421133f5..e6a0837eb 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseGenerators.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala index 61cd13fd1..b64139336 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala index 6ca4cab77..bf3ae97d2 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * 
SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala b/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala index 052654f04..49767d880 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommitRecoverySpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala index b521540fb..5bb4ac87e 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommitTimeoutExceptionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala index aad5d48fc..679d3c1a0 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala index c8eb5c20c..c3e784ea1 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableConsumerRecordSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala index 151393cff..4c407f0f8 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetBatchSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala index d3a7d6f9f..fccb02c38 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala index 7ba74d124..13a497999 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableOffsetSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git 
a/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala index 2e9da26ff..e6e1d4809 100644 --- a/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/CommittableProducerRecordsLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala index 167e20315..e659bc53c 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala index a967f17a2..d59f9501e 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala index 0bd2a4e19..5cca442a1 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala index b4039b21f..48cebb4a1 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerShutdownExceptionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala index 27c6c5b66..9fa80dbe5 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializationExceptionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala index acdd4d869..5ded2080b 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala index 
72a6a657e..dad224d39 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderDeserializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala index 83ef1954c..2b3c1e294 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala index 76cbc506e..d99bb6212 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderSerializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala index 603bde609..470bede82 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeaderSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala index 297766e92..cebb188da 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala index 4a04bdaef..a330f12f7 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala b/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala index 8cb2b1948..4d069c57c 100644 --- a/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/IsolationLevelSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala b/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala index 86540366a..44febde67 100644 --- a/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/JitterSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala 
b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala index 7be58e0d8..4d0c68825 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaAdminClientSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala index 71f27bcd9..6ba0c2317 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala index 9d4f7a9ff..ca29d99e2 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerConnectionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 14faf8fc3..2a8d58cf5 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala index 5fcdf41b0..d67ea5b0b 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala index 30093582f..315f8233e 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerRecordLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala index 713a81972..f7e907d46 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala index 1ffad0c2b..be94f31cb 100644 --- a/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/SerializationExceptionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 
2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala index 80c0d042f..e224b42ab 100644 --- a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala b/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala index b4faf919e..9c98322cf 100644 --- a/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TimestampLawsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala b/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala index ab4ebb81e..3971ba5d4 100644 --- a/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TimestampSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index e39e60cce..16a6df9b0 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala b/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala index 269476fbd..4575d32b6 100644 --- a/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/UnexpectedTopicExceptionSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index 25bbe71eb..beb4cbce8 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala b/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala index a66e0f5f5..3c65b6da9 100644 --- a/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/security/KafkaCredentialStoreSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan-testkit-munit/src/main/scala/fs2/kafka/vulcan/testkit/SchemaSuite.scala 
b/modules/vulcan-testkit-munit/src/main/scala/fs2/kafka/vulcan/testkit/SchemaSuite.scala index 9dba8bdc2..5fe78ad2c 100644 --- a/modules/vulcan-testkit-munit/src/main/scala/fs2/kafka/vulcan/testkit/SchemaSuite.scala +++ b/modules/vulcan-testkit-munit/src/main/scala/fs2/kafka/vulcan/testkit/SchemaSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala b/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala index 6754b9b04..39a34fbdd 100644 --- a/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala +++ b/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/Auth.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/Auth.scala index b391204c4..618c89df0 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/Auth.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/Auth.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index 4b88c5e72..482dfa742 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index 841c92b86..87fb6442c 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala index ab60f4661..9287af563 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala index d509beda5..309307dcb 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/package.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/package.scala index 39a394f14..4f546cfc4 100644 --- 
a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/package.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/package.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala index 72317a053..6787ea050 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AuthSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index 238f789c5..b5f1e678e 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala index 1d6ad27aa..8ab9b6933 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala index cb3abe8c1..a7be4454c 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSettingsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index dacab63a6..591742428 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala index cf48f0686..eb9cb8e77 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/SchemaRegistryClientSettingsSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2022 OVO Energy Limited + * Copyright 2018-2023 OVO Energy Limited * * SPDX-License-Identifier: Apache-2.0 */ From 708c560d93c7998cf72316d6a55edab00b03e7bd Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 18:23:59 +0000 Subject: [PATCH 118/162] Replace deprecated API use --- .../core/src/main/scala/fs2/kafka/KafkaAdminClient.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff 
--git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index 7877af518..679b09557 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -334,13 +334,15 @@ object KafkaAdminClient { partitions: G[TopicPartition] ): ListConsumerGroupOffsetsForPartitions[F] = new ListConsumerGroupOffsetsForPartitions[F] { - private[this] def options: ListConsumerGroupOffsetsOptions = - new ListConsumerGroupOffsetsOptions().topicPartitions(partitions.asJava) + + private[this] val groupOffsets = Map( + groupId -> new ListConsumerGroupOffsetsSpec().topicPartitions(partitions.asJava) + ).asJava override def partitionsToOffsetAndMetadata: F[Map[TopicPartition, OffsetAndMetadata]] = withAdminClient { adminClient => adminClient - .listConsumerGroupOffsets(groupId, options) + .listConsumerGroupOffsets(groupOffsets) .partitionsToOffsetAndMetadata }.map(_.toMap) From 1953b2445f1bec2ce76bfe5a642cf6e4e3da68fe Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 19:01:36 +0000 Subject: [PATCH 119/162] 3.x - pin some dependencies --- .scala-steward.conf | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.scala-steward.conf b/.scala-steward.conf index 3bc97b2e1..8e3e92e57 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -32,7 +32,14 @@ updates.ignore = [{ },{ groupId = "org.typelevel", artifactId = "discipline-scalatest" -}] +},{ + groupId = "ch.qos.logback", + artifactId = "logback-classic" +},{ + groupId = "com.github.fd4s", + artifactId = "vulcan" +} +] updates.pin = [{ groupId="org.apache.kafka", From 9a638896c7f9abd0967a3b6930dbb75308e7d5d7 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 19:52:58 +0000 Subject: [PATCH 120/162] Update github workflow --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3505f1dab..8d896ae59 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8, 3.2.1] + scala: [2.13.10, 3.2.1] java: [temurin@8, temurin@17] exclude: - scala: 3.2.1 @@ -89,7 +89,7 @@ jobs: - run: sbt '++ ${{ matrix.scala }}' ci - - if: matrix.scala == '2.13.8' + - if: matrix.scala == '2.13.10' run: sbt '++ ${{ matrix.scala }}' docs/run publish: @@ -99,7 +99,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8] + scala: [2.13.10] java: [temurin@8] runs-on: ${{ matrix.os }} steps: From 89c0e60404db730609bfcda2d6cba4b1f80c83d6 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 20:01:25 +0000 Subject: [PATCH 121/162] Update GH workflow --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 92b8e806a..6a306a008 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -99,7 +99,7 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.8] + scala: [2.13.10] java: [temurin@8] runs-on: ${{ matrix.os }} steps: From 5990930f253aaaca76f0c776b7048fc681fccff2 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 20:30:02 +0000 Subject: [PATCH 122/162] Restore support for Scala 2.12 --- build.sbt | 12 ++++---- .../fs2/kafka/internal/converters.scala | 22 +++++++++++++++ .../fs2/kafka/internal/converters.scala | 19 +++++++++++++ 
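For reference, the admin-client change in PATCH 118/162 above moves off the deprecated listConsumerGroupOffsets(groupId, options) overload onto the spec-based overload available in kafka-clients 3.3+ (which this build targets). A minimal standalone sketch of that API; the method and value names below are illustrative and not taken from the patch:

import org.apache.kafka.clients.admin.{AdminClient, ListConsumerGroupOffsetsSpec}
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.{KafkaFuture, TopicPartition}
import scala.jdk.CollectionConverters._

// One ListConsumerGroupOffsetsSpec per group id replaces the deprecated
// (groupId, ListConsumerGroupOffsetsOptions) overload.
def committedOffsets(
  adminClient: AdminClient,
  groupId: String,
  partitions: List[TopicPartition]
): KafkaFuture[java.util.Map[TopicPartition, OffsetAndMetadata]] = {
  val groupSpecs = Map(
    groupId -> new ListConsumerGroupOffsetsSpec().topicPartitions(partitions.asJava)
  ).asJava
  adminClient.listConsumerGroupOffsets(groupSpecs).partitionsToOffsetAndMetadata()
}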
.../src/main/scala/fs2/kafka/Headers.scala | 2 +- .../scala/fs2/kafka/KafkaAdminClient.scala | 2 +- .../main/scala/fs2/kafka/KafkaConsumer.scala | 9 +++--- .../main/scala/fs2/kafka/KafkaProducer.scala | 10 +++---- .../fs2/kafka/KafkaProducerConnection.scala | 2 +- .../kafka/TransactionalKafkaProducer.scala | 8 +++--- .../scala/fs2/kafka/admin/MkAdminClient.scala | 2 +- .../scala/fs2/kafka/consumer/MkConsumer.scala | 2 +- .../kafka/internal/KafkaConsumerActor.scala | 3 +- .../fs2/kafka/internal/WithAdminClient.scala | 2 +- .../fs2/kafka/internal/WithConsumer.scala | 2 +- .../fs2/kafka/internal/WithProducer.scala | 4 +-- .../internal/WithTransactionalProducer.scala | 2 +- .../scala/fs2/kafka/internal/syntax.scala | 28 +++++++++++++++++-- .../src/main/scala/fs2/kafka/package.scala | 1 - .../scala/fs2/kafka/producer/MkProducer.scala | 2 +- .../test/scala/fs2/kafka/BaseKafkaSpec.scala | 2 +- .../scala/fs2/kafka/ConsumerRecordSpec.scala | 3 +- .../test/scala/fs2/kafka/HeadersSpec.scala | 2 +- .../scala/fs2/kafka/KafkaConsumerSpec.scala | 2 +- .../TransactionalKafkaProducerSpec.scala | 4 +-- .../scala/fs2/kafka/vulcan/AvroSettings.scala | 2 +- .../vulcan/SchemaRegistryClientSettings.scala | 2 +- 26 files changed, 106 insertions(+), 45 deletions(-) create mode 100644 modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala diff --git a/build.sbt b/build.sbt index d46a7bb8b..1514a432d 100644 --- a/build.sbt +++ b/build.sbt @@ -14,7 +14,9 @@ val vulcanVersion = "1.8.4" val munitVersion = "0.7.29" -val scala2 = "2.13.10" +val scala212 = "2.12.17" + +val scala213 = "2.13.10" val scala3 = "3.2.2" @@ -135,7 +137,7 @@ lazy val dependencySettings = Seq( lazy val mdocSettings = Seq( mdoc := (Compile / run).evaluated, scalacOptions --= Seq("-Xfatal-warnings", "-Ywarn-unused"), - crossScalaVersions := Seq(scala2), + crossScalaVersions := Seq(scala213), ScalaUnidoc / unidoc / unidocProjectFilter := inProjects(core, vulcan), ScalaUnidoc / unidoc / target := (LocalRootProject / baseDirectory).value / "website" / "static" / "api", cleanFiles += (ScalaUnidoc / unidoc / target).value, @@ -201,7 +203,7 @@ ThisBuild / githubWorkflowTargetBranches := Seq("series/*") ThisBuild / githubWorkflowBuild := Seq( WorkflowStep.Sbt(List("ci")), - WorkflowStep.Sbt(List("docs/run"), cond = Some(s"matrix.scala == '$scala2'")) + WorkflowStep.Sbt(List("docs/run"), cond = Some(s"matrix.scala == '$scala213'")) ) ThisBuild / githubWorkflowArtifactUpload := false @@ -269,8 +271,8 @@ lazy val noPublishSettings = publishArtifact := false ) -ThisBuild / scalaVersion := scala2 -ThisBuild / crossScalaVersions := Seq(scala2, scala3) +ThisBuild / scalaVersion := scala213 +ThisBuild / crossScalaVersions := Seq(scala212, scala213, scala3) lazy val scalaSettings = Seq( Compile / doc / scalacOptions += "-nowarn", // workaround for https://github.com/scala/bug/issues/12007 but also suppresses genunine problems diff --git a/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala b/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala index e69de29bb..33f9207da 100644 --- a/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala +++ b/modules/core/src/main/scala-2.12/fs2/kafka/internal/converters.scala @@ -0,0 +1,22 @@ +/* + * Copyright 2018-2023 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + +package fs2.kafka.internal + +import java.util.Optional + +private[kafka] object converters { + val collection = scala.collection.JavaConverters + + object option { + implicit class 
OptionOps[A](private val self: Option[A]) extends AnyVal { + def toJava: Optional[A] = self.fold[Optional[A]](Optional.empty())(Optional.of) + } + } + + def unsafeWrapArray[A](array: Array[A]): Seq[A] = + array +} diff --git a/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala b/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala new file mode 100644 index 000000000..6c10181e8 --- /dev/null +++ b/modules/core/src/main/scala-2.13+/fs2/kafka/internal/converters.scala @@ -0,0 +1,19 @@ +/* + * Copyright 2018-2023 OVO Energy Limited + * + * SPDX-License-Identifier: Apache-2.0 + */ + +package fs2.kafka.internal + +import scala.collection.immutable.ArraySeq + +private[kafka] object converters { + val collection = scala.jdk.CollectionConverters + val option = scala.jdk.OptionConverters + + val duration = scala.jdk.DurationConverters + + def unsafeWrapArray[A](array: Array[A]): Seq[A] = + ArraySeq.unsafeWrapArray(array) +} diff --git a/modules/core/src/main/scala/fs2/kafka/Headers.scala b/modules/core/src/main/scala/fs2/kafka/Headers.scala index 21ff77696..c0a468308 100644 --- a/modules/core/src/main/scala/fs2/kafka/Headers.scala +++ b/modules/core/src/main/scala/fs2/kafka/Headers.scala @@ -8,7 +8,7 @@ package fs2.kafka import cats.data.{Chain, NonEmptyChain} import cats.{Eq, Show} -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.internal.syntax._ /** diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index 679b09557..a7daa935c 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -13,7 +13,7 @@ import fs2.Stream import fs2.kafka.KafkaAdminClient._ import fs2.kafka.admin.MkAdminClient import fs2.kafka.internal.WithAdminClient -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.internal.syntax._ import org.apache.kafka.clients.admin._ import org.apache.kafka.clients.consumer.OffsetAndMetadata diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index d297c4cea..d94836cdf 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -14,8 +14,7 @@ import cats.effect.implicits._ import cats.syntax.all._ import fs2.{Chunk, Stream} import fs2.kafka.internal._ -import scala.jdk.CollectionConverters._ -import scala.jdk.DurationConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.instances._ import fs2.kafka.internal.KafkaConsumerActor._ import fs2.kafka.internal.syntax._ @@ -682,7 +681,7 @@ object KafkaConsumer { def stream[F[_], K, V]( settings: ConsumerSettings[F, K, V] )(implicit F: Async[F], mk: MkConsumer[F]): Stream[F, KafkaConsumer[F, K, V]] = - Stream.resource(resource(settings)) + Stream.resource(resource(settings)(F, mk)) def apply[F[_]]: ConsumerPartiallyApplied[F] = new ConsumerPartiallyApplied() @@ -704,7 +703,7 @@ object KafkaConsumer { implicit F: Async[F], mk: MkConsumer[F] ): Resource[F, KafkaConsumer[F, K, V]] = - KafkaConsumer.resource(settings) + KafkaConsumer.resource(settings)(F, mk) /** * Alternative version of `stream` where the `F[_]` is @@ -720,7 +719,7 @@ object KafkaConsumer { implicit F: Async[F], mk: MkConsumer[F] ): Stream[F, KafkaConsumer[F, K, V]] = - 
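The converters shims restored by PATCH 122/162 give shared sources a single import that resolves to scala.jdk.CollectionConverters on 2.13+/3 and to scala.collection.JavaConverters on 2.12. A small usage sketch with hypothetical helper names; the shim is private[kafka], so this only compiles from within the library's own sources:

import fs2.kafka.internal.converters.collection._
import org.apache.kafka.common.TopicPartition

// Java -> Scala and Scala -> Java conversions look identical on every
// cross-built Scala version thanks to the shim.
def toScalaSet(partitions: java.util.Set[TopicPartition]): Set[TopicPartition] =
  partitions.asScala.toSet

def toJavaList(partitions: List[TopicPartition]): java.util.List[TopicPartition] =
  partitions.asJava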
KafkaConsumer.stream(settings) + KafkaConsumer.stream(settings)(F, mk) override def toString: String = "ConsumerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index d766eea29..563917519 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -133,7 +133,7 @@ object KafkaProducer { def resource[F[_], K, V]( settings: ProducerSettings[F, K, V] )(implicit F: Async[F], mk: MkProducer[F]): Resource[F, KafkaProducer.PartitionsFor[F, K, V]] = - KafkaProducerConnection.resource(settings).evalMap(_.withSerializersFrom(settings)) + KafkaProducerConnection.resource(settings)(F, mk).evalMap(_.withSerializersFrom(settings)) private[kafka] def from[F[_], K, V]( connection: KafkaProducerConnection[F], @@ -170,7 +170,7 @@ object KafkaProducer { def stream[F[_], K, V]( settings: ProducerSettings[F, K, V] )(implicit F: Async[F], mk: MkProducer[F]): Stream[F, KafkaProducer.PartitionsFor[F, K, V]] = - Stream.resource(KafkaProducer.resource(settings)) + Stream.resource(KafkaProducer.resource(settings)(F, mk)) private[kafka] def produce[F[_]: Async, K, V]( withProducer: WithProducer[F], @@ -217,7 +217,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Pipe[F, ProducerRecords[K, V], ProducerResult[K, V]] = - records => stream(settings).flatMap(pipe(_).apply(records)) + records => stream(settings)(F, mk).flatMap(pipe(_).apply(records)) /** * Produces records in batches using the provided [[KafkaProducer]]. @@ -278,7 +278,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Resource[F, KafkaProducer[F, K, V]] = - KafkaProducer.resource(settings) + KafkaProducer.resource(settings)(F, mk) /** * Alternative version of `stream` where the `F[_]` is @@ -294,7 +294,7 @@ object KafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, KafkaProducer[F, K, V]] = - KafkaProducer.stream(settings) + KafkaProducer.stream(settings)(F, mk) override def toString: String = "ProducerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index e90d36f97..08f6cb235 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -14,7 +14,7 @@ import fs2.kafka.producer.MkProducer import org.apache.kafka.common.PartitionInfo import scala.annotation.nowarn -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import org.apache.kafka.common.MetricName import org.apache.kafka.common.Metric diff --git a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala index 81b566b18..d782c3150 100644 --- a/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/TransactionalKafkaProducer.scala @@ -10,7 +10,7 @@ import cats.effect.syntax.all._ import cats.effect.{Async, Outcome, Resource} import cats.syntax.all._ import fs2.kafka.internal._ -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.producer.MkProducer import fs2.{Chunk, Stream} import org.apache.kafka.clients.consumer.ConsumerGroupMetadata @@ -180,7 +180,7 
@@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, TransactionalKafkaProducer.WithoutOffsets[F, K, V]] = - Stream.resource(resource(settings)) + Stream.resource(resource(settings)(F, mk)) def apply[F[_]]: TransactionalProducerPartiallyApplied[F] = new TransactionalProducerPartiallyApplied @@ -202,7 +202,7 @@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Resource[F, TransactionalKafkaProducer.WithoutOffsets[F, K, V]] = - TransactionalKafkaProducer.resource(settings) + TransactionalKafkaProducer.resource(settings)(F, mk) /** * Alternative version of `stream` where the `F[_]` is @@ -218,7 +218,7 @@ object TransactionalKafkaProducer { implicit F: Async[F], mk: MkProducer[F] ): Stream[F, TransactionalKafkaProducer.WithoutOffsets[F, K, V]] = - TransactionalKafkaProducer.stream(settings) + TransactionalKafkaProducer.stream(settings)(F, mk) override def toString: String = "TransactionalProducerPartiallyApplied$" + System.identityHashCode(this) diff --git a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala index 6bb02cb90..1e4f1e817 100644 --- a/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/admin/MkAdminClient.scala @@ -9,7 +9,7 @@ package fs2.kafka.admin import cats.effect.Sync import fs2.kafka.AdminClientSettings import org.apache.kafka.clients.admin.AdminClient -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ /** * A capability trait representing the ability to instantiate the Java diff --git a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala index b5c84e325..4848d01e2 100644 --- a/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/consumer/MkConsumer.scala @@ -8,7 +8,7 @@ package fs2.kafka.consumer import cats.effect.Sync import fs2.kafka.{ConsumerSettings, KafkaByteConsumer} -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import org.apache.kafka.common.serialization.ByteArrayDeserializer /** diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index 96fe35618..9ae6e2dfe 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -13,8 +13,7 @@ import cats.effect.syntax.all._ import cats.syntax.all._ import fs2.Chunk import fs2.kafka._ -import scala.jdk.CollectionConverters._ -import scala.jdk.DurationConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.instances._ import fs2.kafka.internal.KafkaConsumerActor._ import fs2.kafka.internal.LogEntry._ diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala index 30ae6ce93..2ee5f22a7 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala @@ -13,7 +13,7 @@ import fs2.kafka.admin.MkAdminClient import fs2.kafka.internal.syntax._ import org.apache.kafka.clients.admin.AdminClient import org.apache.kafka.common.KafkaFuture -import scala.jdk.DurationConverters._ +import 
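PATCH 122/162 also has the consumer/producer companions forward their implicit Async and MkConsumer/MkProducer instances explicitly when delegating; call sites are unaffected. A condensed sketch of the pattern (attributing it to 2.12 implicit-resolution quirks is an assumption, since the commit message only says it restores 2.12 support):

import cats.effect.Async
import fs2.Stream
import fs2.kafka.{ConsumerSettings, KafkaConsumer}
import fs2.kafka.consumer.MkConsumer

// The delegating overload names the implicits it forwards instead of
// letting the compiler re-summon them at the delegation site.
def stream[F[_], K, V](
  settings: ConsumerSettings[F, K, V]
)(implicit F: Async[F], mk: MkConsumer[F]): Stream[F, KafkaConsumer[F, K, V]] =
  Stream.resource(KafkaConsumer.resource(settings)(F, mk))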
fs2.kafka.internal.syntax._ private[kafka] sealed abstract class WithAdminClient[F[_]] { def apply[A](f: AdminClient => KafkaFuture[A]): F[A] diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala index 880105a12..70ecadcb9 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithConsumer.scala @@ -10,7 +10,7 @@ import cats.effect.{Async, Resource} import cats.syntax.all._ import fs2.kafka.consumer.MkConsumer import fs2.kafka.{ConsumerSettings, KafkaByteConsumer} -import scala.jdk.DurationConverters._ +import fs2.kafka.internal.syntax._ private[kafka] sealed abstract class WithConsumer[F[_]] { def blocking[A](f: KafkaByteConsumer => A): F[A] diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala index ca4abf572..174777052 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithProducer.scala @@ -7,9 +7,9 @@ package fs2.kafka.internal import cats.effect.{Async, Resource} -import fs2.kafka.{KafkaByteProducer, ProducerSettings} -import scala.jdk.DurationConverters._ +import fs2.kafka.internal.syntax._ import fs2.kafka.producer.MkProducer +import fs2.kafka.{KafkaByteProducer, ProducerSettings} private[kafka] sealed abstract class WithProducer[F[_]] { def apply[A](f: (KafkaByteProducer, Blocking[F]) => F[A]): F[A] diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala index d9c26a48f..54204019c 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithTransactionalProducer.scala @@ -9,7 +9,7 @@ package fs2.kafka.internal import cats.effect.std.Semaphore import cats.effect.{Async, MonadCancelThrow, Resource} import cats.implicits._ -import scala.jdk.DurationConverters._ +import fs2.kafka.internal.syntax._ import fs2.kafka.producer.MkProducer import fs2.kafka.{KafkaByteProducer, TransactionalProducerSettings} diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index 5d71038a1..4f91b6aba 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -10,12 +10,17 @@ import cats.{FlatMap, Foldable, Show} import cats.effect.Async import cats.syntax.all._ import fs2.kafka.{Header, Headers, KafkaHeaders} +import fs2.kafka.internal.converters.unsafeWrapArray +import fs2.kafka.internal.converters.collection._ -import scala.jdk.CollectionConverters._ +import java.time.Duration +import java.time.temporal.ChronoUnit import java.util +import java.util.concurrent.TimeUnit import org.apache.kafka.common.KafkaFuture -import scala.collection.immutable.{ArraySeq, SortedSet} +import scala.collection.immutable.SortedSet +import scala.concurrent.duration.FiniteDuration private[kafka] object syntax { implicit final class LoggingSyntax[F[_], A]( @@ -28,6 +33,23 @@ private[kafka] object syntax { fa.flatMap(a => logging.log(f(a))) } + implicit final class FiniteDurationSyntax( + private val duration: FiniteDuration + ) extends AnyVal { + def toJava: Duration = + if (duration.length == 0L) Duration.ZERO + else + 
duration.unit match { + case TimeUnit.DAYS => Duration.ofDays(duration.length) + case TimeUnit.HOURS => Duration.ofHours(duration.length) + case TimeUnit.MINUTES => Duration.ofMinutes(duration.length) + case TimeUnit.SECONDS => Duration.ofSeconds(duration.length) + case TimeUnit.MILLISECONDS => Duration.ofMillis(duration.length) + case TimeUnit.MICROSECONDS => Duration.of(duration.length, ChronoUnit.MICROS) + case TimeUnit.NANOSECONDS => Duration.ofNanos(duration.length) + } + } + implicit final class FoldableSyntax[F[_], A]( private val fa: F[A] ) extends AnyVal { @@ -173,7 +195,7 @@ private[kafka] object syntax { ) extends AnyVal { def asScala: Headers = Headers.fromSeq { - ArraySeq.unsafeWrapArray { + unsafeWrapArray { headers.toArray.map { header => Header(header.key, header.value) } diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 5281fb96a..9d66bcd4d 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -6,7 +6,6 @@ package fs2 -import fs2.Chunk import cats.Traverse import cats.effect._ diff --git a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala index 05caa9e74..28477ff43 100644 --- a/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/producer/MkProducer.scala @@ -9,7 +9,7 @@ package fs2.kafka.producer import cats.effect.Sync import fs2.kafka.{KafkaByteProducer, ProducerSettings} import org.apache.kafka.common.serialization.ByteArraySerializer -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ /** * A capability trait representing the ability to instantiate the Java diff --git a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala index b64139336..268f0c2f3 100644 --- a/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/BaseKafkaSpec.scala @@ -7,7 +7,7 @@ package fs2.kafka import cats.effect.Sync -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import java.util.UUID import scala.util.Failure import com.dimafeng.testcontainers.{ForAllTestContainer, KafkaContainer} diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala index d59f9501e..259f9149e 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerRecordSpec.scala @@ -13,8 +13,7 @@ import org.apache.kafka.clients.consumer.ConsumerRecord.{NULL_SIZE, NO_TIMESTAMP import org.apache.kafka.common.record.TimestampType import org.apache.kafka.common.record.TimestampType._ import org.scalatest._ -import scala.jdk.OptionConverters._ - +import fs2.kafka.internal.converters.option._ final class ConsumerRecordSpec extends BaseSpec { describe("ConsumerRecord#fromJava") { it("should convert timestamps") { diff --git a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala index a330f12f7..3567bd63c 100644 --- a/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/HeadersSpec.scala @@ -7,7 +7,7 @@ package fs2.kafka import cats.data.Chain -import scala.jdk.CollectionConverters._ +import 
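The FiniteDurationSyntax added to the internal syntax above stands in for scala.jdk.DurationConverters, which is not available on 2.12. A quick sketch of the conversion it provides (again private[kafka], so shown as used from library sources):

import scala.concurrent.duration._
import fs2.kafka.internal.syntax._

// FiniteDuration -> java.time.Duration, mirroring scala.jdk.DurationConverters' toJava.
val pollTimeout: java.time.Duration   = 50.millis.toJava  // Duration.ofMillis(50)
val commitTimeout: java.time.Duration = 15.seconds.toJava // Duration.ofSeconds(15)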
fs2.kafka.internal.converters.collection._ final class HeadersSpec extends BaseSpec { describe("Headers#empty") { diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala index 6ba0c2317..1f4e866a3 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaConsumerSpec.scala @@ -14,7 +14,7 @@ import cats.syntax.all._ import cats.effect.unsafe.implicits.global import fs2.Stream import fs2.concurrent.SignallingRef -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import org.apache.kafka.clients.consumer.{ ConsumerConfig, CooperativeStickyAssignor, diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 75f0e260f..0d89fd510 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -13,7 +13,7 @@ import cats.effect.IO import cats.effect.unsafe.implicits.global import cats.syntax.all._ import fs2.{Chunk, Stream} -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.producer.MkProducer import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerGroupMetadata, OffsetAndMetadata} import org.apache.kafka.common.TopicPartition @@ -99,7 +99,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { ) ) ) - ) -> (key, value) + ) -> ((key, value)) } passthrough <- Stream diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala index 9287af563..4fca097ae 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSettings.scala @@ -8,7 +8,7 @@ package fs2.kafka.vulcan import cats.effect.Sync import cats.syntax.all._ -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ import fs2.kafka.internal.syntax._ import io.confluent.kafka.schemaregistry.avro.AvroSchema import org.apache.avro.Schema diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala index 309307dcb..8e336a447 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/SchemaRegistryClientSettings.scala @@ -8,7 +8,7 @@ package fs2.kafka.vulcan import cats.effect.Sync import cats.Show -import scala.jdk.CollectionConverters._ +import fs2.kafka.internal.converters.collection._ /** * Describes how to create a `SchemaRegistryClient` and which From 63fa32b9d5d3512b250658ef7c97b4a2d96bf232 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 20:35:18 +0000 Subject: [PATCH 123/162] Update workflow --- .github/workflows/ci.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6a306a008..c95ee814d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,9 +28,11 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.10, 3.2.2] + scala: [2.12.17, 2.13.10, 3.2.2] java: [temurin@8, temurin@17] exclude: + - scala: 2.12.17 + java: temurin@17 - 
scala: 3.2.2 java: temurin@17 runs-on: ${{ matrix.os }} From 27bd4b2fa2543a9fecbd179bc69d8a6e44ba8aae Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 24 Mar 2023 20:45:13 +0000 Subject: [PATCH 124/162] Remove unused import --- .../core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala index 2ee5f22a7..229084a76 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/WithAdminClient.scala @@ -13,7 +13,6 @@ import fs2.kafka.admin.MkAdminClient import fs2.kafka.internal.syntax._ import org.apache.kafka.clients.admin.AdminClient import org.apache.kafka.common.KafkaFuture -import fs2.kafka.internal.syntax._ private[kafka] sealed abstract class WithAdminClient[F[_]] { def apply[A](f: AdminClient => KafkaFuture[A]): F[A] From 199a435ab8943b179d7a9b86ab0742f5c74e081c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 26 Mar 2023 14:12:51 +0000 Subject: [PATCH 125/162] Update kafka-avro-serializer to 7.3.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d46a7bb8b..0424bd959 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.4.8" val catsVersion = "2.6.1" -val confluentVersion = "7.3.0" +val confluentVersion = "7.3.2" val fs2Version = "3.6.1" From c1448e09b9cdc15de7f2b0f413da7d062e77a30a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 26 Mar 2023 14:13:20 +0000 Subject: [PATCH 126/162] Update kafka-clients to 3.3.2 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d46a7bb8b..3ef4ccb25 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.3.0" val fs2Version = "3.6.1" -val kafkaVersion = "3.3.1" +val kafkaVersion = "3.3.2" val testcontainersScalaVersion = "0.40.12" From f7ddf9695e28b243ea1646180843f7314012e447 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 11:05:02 +0100 Subject: [PATCH 127/162] Update workflow --- .../src/test/scala/fs2/kafka/RecordDeserializerSpec.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala index bc2d2cbd1..d0a9b8327 100644 --- a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala @@ -27,9 +27,9 @@ class RecordDeserializerSpec extends BaseSpec { val attemptIntRecordDes: RecordDeserializer[IO, Either[Throwable, Int]] = RecordDeserializer - .const(IO.pure(Deserializer[IO, Int].flatMap[Int] { + .const(IO.pure(Deserializer[IO, Int].flatMap { case 1 => Deserializer[IO, Int] - case _ => Deserializer.failWith("Unsupported value") + case _ => Deserializer.failWith[IO, Int]("Unsupported value") })) .attempt From 589c42553b6859aa28115e28abb590f81af8a851 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 11:42:25 +0100 Subject: [PATCH 128/162] Finish compiling --- .../scala/fs2/kafka/ConsumerSettings.scala | 8 ++++---- .../scala/fs2/kafka/ProducerSettings.scala | 8 ++++---- .../scala/fs2/kafka/RecordDeserializer.scala | 8 ++++---- .../scala/fs2/kafka/RecordSerializer.scala | 8 ++++---- 
.../fs2/kafka/RecordDeserializerSpec.scala | 18 +++++++++--------- .../scala/fs2/kafka/RecordSerializerSpec.scala | 10 +++++----- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 3c0d3eb8d..717063821 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -52,8 +52,8 @@ sealed abstract class ConsumerSettings[F[_], K, V] { * Note that this will remove any custom `recordMetadata` configuration. **/ def withDeserializers[K0, V0]( - keyDeserializer: F[KeyDeserializer[F, K0]], - valueDeserializer: F[ValueDeserializer[F, V0]] + keyDeserializer: Resource[F, KeyDeserializer[F, K0]], + valueDeserializer: Resource[F, ValueDeserializer[F, V0]] ): ConsumerSettings[F, K0, V0] /** @@ -540,8 +540,8 @@ object ConsumerSettings { s"ConsumerSettings(closeTimeout = $closeTimeout, commitTimeout = $commitTimeout, pollInterval = $pollInterval, pollTimeout = $pollTimeout, commitRecovery = $commitRecovery)" override def withDeserializers[K0, V0]( - keyDeserializer: F[KeyDeserializer[F, K0]], - valueDeserializer: F[ValueDeserializer[F, V0]] + keyDeserializer: Resource[F, KeyDeserializer[F, K0]], + valueDeserializer: Resource[F, ValueDeserializer[F, V0]] ): ConsumerSettings[F, K0, V0] = copy( keyDeserializer = keyDeserializer, diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index bbd9227b8..9e0b4b096 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -43,8 +43,8 @@ sealed abstract class ProducerSettings[F[_], K, V] { * Replace the serializers with those provided in the arguments. */ def withSerializers[K1, V1]( - keySerializer: F[KeySerializer[F, K1]], - valueSerializer: F[ValueSerializer[F, V1]] + keySerializer: Resource[F, KeySerializer[F, K1]], + valueSerializer: Resource[F, ValueSerializer[F, V1]] ): ProducerSettings[F, K1, V1] /** @@ -304,8 +304,8 @@ object ProducerSettings { s"ProducerSettings(closeTimeout = $closeTimeout)" override def withSerializers[K1, V1]( - keySerializer: F[KeySerializer[F, K1]], - valueSerializer: F[ValueSerializer[F, V1]] + keySerializer: Resource[F, KeySerializer[F, K1]], + valueSerializer: Resource[F, ValueSerializer[F, V1]] ): ProducerSettings[F, K1, V1] = copy(keySerializer = keySerializer, valueSerializer = valueSerializer) } diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala index b77ffc37a..c90bd532f 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala @@ -24,7 +24,7 @@ sealed abstract class RecordDeserializer[F[_], A] { */ final def transform[B]( f: Deserializer[F, A] => Deserializer[F, B] - )(implicit F: Functor[F]): RecordDeserializer[F, B] = + ): RecordDeserializer[F, B] = RecordDeserializer.instance( forKey = forKey.map(des => f(des.asInstanceOf[Deserializer[F, A]])), forValue = forValue.map(des => f(des.asInstanceOf[Deserializer[F, A]])) @@ -44,7 +44,7 @@ sealed abstract class RecordDeserializer[F[_], A] { * * See [[Deserializer.option]] for more details. 
*/ - final def option(implicit F: Functor[F]): RecordDeserializer[F, Option[A]] = + final def option: RecordDeserializer[F, Option[A]] = transform(_.option) } @@ -66,8 +66,8 @@ object RecordDeserializer { forKey: => Resource[F, KeyDeserializer[F, A]], forValue: => Resource[F, ValueDeserializer[F, A]] ): RecordDeserializer[F, A] = { - def _forKey: F[KeyDeserializer[F, A]] = forKey - def _forValue: F[ValueDeserializer[F, A]] = forValue + def _forKey: Resource[F, KeyDeserializer[F, A]] = forKey + def _forValue: Resource[F, ValueDeserializer[F, A]] = forValue new RecordDeserializer[F, A] { override def forKey: Resource[F, KeyDeserializer[F, A]] = diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala index 0816055d8..bbc791506 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala @@ -24,7 +24,7 @@ sealed abstract class RecordSerializer[F[_], A] { */ final def transform[B]( f: Serializer[F, A] => Serializer[F, B] - )(implicit F: Functor[F]): RecordSerializer[F, B] = + ): RecordSerializer[F, B] = RecordSerializer.instance( forKey = forKey.map(f.asInstanceOf[KeySerializer[F, A] => KeySerializer[F, B]]), forValue = forValue.map(f.asInstanceOf[ValueSerializer[F, A] => ValueSerializer[F, B]]) @@ -36,7 +36,7 @@ sealed abstract class RecordSerializer[F[_], A] { * * See [[Serializer.option]] for more details. */ - final def option(implicit F: Functor[F]): RecordSerializer[F, Option[A]] = + final def option: RecordSerializer[F, Option[A]] = transform(_.option) } @@ -58,8 +58,8 @@ object RecordSerializer { forKey: => Resource[F, KeySerializer[F, A]], forValue: => Resource[F, ValueSerializer[F, A]] ): RecordSerializer[F, A] = { - def _forKey: F[KeySerializer[F, A]] = forKey - def _forValue: F[ValueSerializer[F, A]] = forValue + def _forKey: Resource[F, KeySerializer[F, A]] = forKey + def _forValue: Resource[F, ValueSerializer[F, A]] = forValue new RecordSerializer[F, A] { override def forKey: Resource[F, KeySerializer[F, A]] = diff --git a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala index d0a9b8327..5adf7fa1e 100644 --- a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala @@ -11,11 +11,11 @@ class RecordDeserializerSpec extends BaseSpec { val strRecordDes: RecordDeserializer[IO, String] = RecordDeserializer - .const(IO.pure(Deserializer[IO, Int])) + .lift(Deserializer[IO, Int]) .transform(_.map(_.toString)) strRecordDes.forKey - .flatMap(_.deserialize("T1", Headers.empty, serializeToBytes(1))) + .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) .unsafeRunSync() shouldBe "1" } } @@ -27,18 +27,18 @@ class RecordDeserializerSpec extends BaseSpec { val attemptIntRecordDes: RecordDeserializer[IO, Either[Throwable, Int]] = RecordDeserializer - .const(IO.pure(Deserializer[IO, Int].flatMap { + .lift(Deserializer[IO, Int].flatMap { case 1 => Deserializer[IO, Int] case _ => Deserializer.failWith[IO, Int]("Unsupported value") - })) + }) .attempt attemptIntRecordDes.forKey - .flatMap(_.deserialize("T1", Headers.empty, serializeToBytes(1))) + .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) .unsafeRunSync() shouldBe Right(1) attemptIntRecordDes.forKey - .flatMap(_.deserialize("T1", Headers.empty, null)) + .use(_.deserialize("T1", Headers.empty, 
null)) .unsafeRunSync() .isLeft shouldBe true } @@ -49,15 +49,15 @@ class RecordDeserializerSpec extends BaseSpec { val optIntRecordDes: RecordDeserializer[IO, Option[Int]] = RecordDeserializer - .const(IO.pure(Deserializer[IO, Int])) + .lift(Deserializer[IO, Int]) .option optIntRecordDes.forKey - .flatMap(_.deserialize("T1", Headers.empty, serializeToBytes(1))) + .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) .unsafeRunSync() shouldBe Some(1) optIntRecordDes.forKey - .flatMap(_.deserialize("T1", Headers.empty, null)) + .use(_.deserialize("T1", Headers.empty, null)) .unsafeRunSync() shouldBe None } } diff --git a/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala index 9511c83e7..5852a251c 100644 --- a/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala @@ -11,11 +11,11 @@ class RecordSerializerSpec extends BaseSpec { val intRecordSer: RecordSerializer[IO, Int] = RecordSerializer - .const(IO.pure(Serializer[IO, String])) + .lift(Serializer[IO, String]) .transform(_.contramap(_.toString)) intRecordSer.forKey - .flatMap(_.serialize("T1", Headers.empty, 1)) + .use(_.serialize("T1", Headers.empty, 1)) .unsafeRunSync() shouldBe "1".getBytes } } @@ -25,15 +25,15 @@ class RecordSerializerSpec extends BaseSpec { val optStrRecordSer: RecordSerializer[IO, Option[String]] = RecordSerializer - .const(IO.pure(Serializer[IO, String])) + .lift(Serializer[IO, String]) .option optStrRecordSer.forKey - .flatMap(_.serialize("T1", Headers.empty, Some("1"))) + .use(_.serialize("T1", Headers.empty, Some("1"))) .unsafeRunSync() shouldBe "1".getBytes optStrRecordSer.forKey - .flatMap(_.serialize("T1", Headers.empty, None)) + .use(_.serialize("T1", Headers.empty, None)) .unsafeRunSync() shouldBe null } } From ad5e1a9907b7e2e774d52f88d5bdbfe964e715ec Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 11:46:27 +0100 Subject: [PATCH 129/162] scalafmt --- .../fs2/kafka/vulcan/AvroDeserializer.scala | 36 ++++++++++--------- .../fs2/kafka/vulcan/AvroSerializer.scala | 6 ++-- 2 files changed, 23 insertions(+), 19 deletions(-) diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index 2e67f01d8..fa7bae038 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -31,30 +31,32 @@ final class AvroDeserializer[A] private[vulcan] ( Deserializer.instance { (topic, _, bytes) => F.defer { if (bytes == null || bytes.length == 0) { - F.raiseError( - new IllegalArgumentException( - s"Invalid Avro record: bytes is null or empty" + F.raiseError( + new IllegalArgumentException( + s"Invalid Avro record: bytes is null or empty" + ) ) - ) - } else {val writerSchemaId = - ByteBuffer.wrap(bytes).getInt(1) // skip magic byte + } else { + val writerSchemaId = + ByteBuffer.wrap(bytes).getInt(1) // skip magic byte - val writerSchema = { - val schema = schemaRegistryClient.getSchemaById(writerSchemaId) - if (schema.isInstanceOf[AvroSchema]) - schema.asInstanceOf[AvroSchema].rawSchema() - else - null - } + val writerSchema = { + val schema = schemaRegistryClient.getSchemaById(writerSchemaId) + if (schema.isInstanceOf[AvroSchema]) + schema.asInstanceOf[AvroSchema].rawSchema() + else + null + } - codec.decode(deserializer.deserialize(topic, bytes, 
schema), writerSchema) match { - case Right(a) => F.pure(a) - case Left(error) => F.raiseError(error.throwable) + codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { + case Right(a) => F.pure(a) + case Left(error) => F.raiseError(error.throwable) + } } } } - }} + } RecordDeserializer.instance( forKey = createDeserializer(true), diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index 775226c55..7112fdcd1 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -22,14 +22,16 @@ final class AvroSerializer[A] private[vulcan] ( case Left(e) => Resource.pure(Serializer.fail(e.throwable)) case Right(writerSchema) => Resource - .make(settings.createAvroSerializer(isKey, Some(writerSchema))) { case (ser, _) => F.delay(ser.close()) } + .make(settings.createAvroSerializer(isKey, Some(writerSchema))) { + case (ser, _) => F.delay(ser.close()) + } .map { case (serializer, _) => Serializer.instance { (topic, _, a) => F.defer { codec.encode(a) match { case Right(value) => F.pure(serializer.serialize(topic, value)) - case Left(error) => F.raiseError(error.throwable) + case Left(error) => F.raiseError(error.throwable) } } } From 220ee171cf895582b5aec5a9d5e8b0973d471e3f Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 11:52:10 +0100 Subject: [PATCH 130/162] implicits may not be call-by-name in scala 2 --- modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala index bbc791506..3ae0871a7 100644 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala @@ -73,6 +73,6 @@ object RecordSerializer { } } - implicit def lift[F[_], A](implicit serializer: => Serializer[F, A]): RecordSerializer[F, A] = + implicit def lift[F[_], A](implicit serializer: Serializer[F, A]): RecordSerializer[F, A] = RecordSerializer.const(Resource.pure(serializer)) } From 36beb89d72d680f1916509c039280b40e1f0ca78 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 12:23:37 +0100 Subject: [PATCH 131/162] fix tests --- .../src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala | 2 +- .../src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index ea2d2b6dc..8ac0e42cf 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -37,7 +37,7 @@ final class AvroDeserializerSpec extends AnyFunSpec { it("raises IllegalArgumentException if the data is null") { val deserializer = AvroDeserializer[String].using(avroSettings) intercept[IllegalArgumentException] { - deserializer.forKey.flatMap(_.deserialize("foo", Headers.empty, null)).unsafeRunSync() + deserializer.forKey.use(_.deserialize("foo", Headers.empty, null)).unsafeRunSync() } } diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala index 
8f95770db..cbdf96b97 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala @@ -27,7 +27,7 @@ final class AvroSerializerSpec extends AnyFunSpec { (avroSerializer[Either[Int, Boolean]] .using(avroSettings) .forValue - .flatMap( + .use( _.serialize( "test-union-topic", Headers.empty, From 639dc0fea96c55e9b1c8882d53aaefa42ec126dd Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 12:25:59 +0100 Subject: [PATCH 132/162] Replace RecordDeserializer with Resource --- .../scala/fs2/kafka/ConsumerSettings.scala | 16 ++-- .../main/scala/fs2/kafka/Deserializer.scala | 7 +- .../scala/fs2/kafka/RecordDeserializer.scala | 86 ------------------- .../fs2/kafka/ConsumerSettingsSpec.scala | 13 +-- .../scala/fs2/kafka/DeserializerSpec.scala | 4 - .../fs2/kafka/RecordDeserializerSpec.scala | 67 --------------- .../fs2/kafka/vulcan/AvroDeserializer.scala | 80 +++++++++-------- .../kafka/vulcan/AvroDeserializerSpec.scala | 28 +++--- .../scala/fs2/kafka/vulcan/PackageSpec.scala | 9 +- 9 files changed, 81 insertions(+), 229 deletions(-) delete mode 100644 modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala delete mode 100644 modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala diff --git a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala index 717063821..dc19feda1 100644 --- a/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ConsumerSettings.scala @@ -581,31 +581,31 @@ object ConsumerSettings { ) def apply[F[_], K, V]( - keyDeserializer: RecordDeserializer[F, K], + keyDeserializer: Resource[F, KeyDeserializer[F, K]], valueDeserializer: ValueDeserializer[F, V] ): ConsumerSettings[F, K, V] = create( - keyDeserializer = keyDeserializer.forKey, + keyDeserializer = keyDeserializer, valueDeserializer = Resource.pure(valueDeserializer) ) def apply[F[_], K, V]( keyDeserializer: KeyDeserializer[F, K], - valueDeserializer: RecordDeserializer[F, V] + valueDeserializer: Resource[F, ValueDeserializer[F, V]] ): ConsumerSettings[F, K, V] = create( keyDeserializer = Resource.pure(keyDeserializer), - valueDeserializer = valueDeserializer.forValue + valueDeserializer = valueDeserializer ) def apply[F[_], K, V]( implicit - keyDeserializer: RecordDeserializer[F, K], - valueDeserializer: RecordDeserializer[F, V] + keyDeserializer: Resource[F, KeyDeserializer[F, K]], + valueDeserializer: Resource[F, ValueDeserializer[F, V]] ): ConsumerSettings[F, K, V] = create( - keyDeserializer = keyDeserializer.forKey, - valueDeserializer = valueDeserializer.forValue + keyDeserializer = keyDeserializer, + valueDeserializer = valueDeserializer ) implicit def consumerSettingsShow[F[_], K, V]: Show[ConsumerSettings[F, K, V]] = diff --git a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala index b2d5faf5f..3c7486a26 100644 --- a/modules/core/src/main/scala/fs2/kafka/Deserializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Deserializer.scala @@ -7,8 +7,9 @@ package fs2.kafka import cats.MonadError -import cats.effect.Sync +import cats.effect.{Resource, Sync} import cats.syntax.all._ + import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID @@ -336,4 +337,8 @@ object GenericDeserializer { implicit def uuid[F[_]](implicit F: Sync[F]): Deserializer[F, UUID] = 
Deserializer.string[F].map(UUID.fromString).suspend + + implicit def resource[T <: KeyOrValue, F[_], A]( + implicit des: GenericDeserializer[T, F, A] + ): Resource[F, GenericDeserializer[T, F, A]] = Resource.pure(des) } diff --git a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala deleted file mode 100644 index c90bd532f..000000000 --- a/modules/core/src/main/scala/fs2/kafka/RecordDeserializer.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright 2018-2023 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka - -import cats.effect.Resource - -/** - * Deserializer which may vary depending on whether a record - * key or value is being deserialized, and which may require - * a creation effect. - */ -sealed abstract class RecordDeserializer[F[_], A] { - - def forKey: Resource[F, KeyDeserializer[F, A]] - - def forValue: Resource[F, ValueDeserializer[F, A]] - - /** - * Returns a new [[RecordDeserializer]] instance applying the mapping function to key and value deserializers - */ - final def transform[B]( - f: Deserializer[F, A] => Deserializer[F, B] - ): RecordDeserializer[F, B] = - RecordDeserializer.instance( - forKey = forKey.map(des => f(des.asInstanceOf[Deserializer[F, A]])), - forValue = forValue.map(des => f(des.asInstanceOf[Deserializer[F, A]])) - ) - - /** - * Returns a new [[RecordDeserializer]] instance that will catch deserialization - * errors and return them as a value, allowing user code to handle them without - * causing the consumer to fail. - */ - final def attempt: RecordDeserializer[F, Either[Throwable, A]] = - transform(_.attempt) - - /** - * Returns a new [[RecordDeserializer]] instance that will deserialize key and value returning `None` when the - * bytes are `null`, and otherwise returns the result wrapped in `Some`. - * - * See [[Deserializer.option]] for more details. 
- */ - final def option: RecordDeserializer[F, Option[A]] = - transform(_.option) -} - -object RecordDeserializer { - def apply[F[_], A]( - implicit deserializer: RecordDeserializer[F, A] - ): RecordDeserializer[F, A] = - deserializer - - def const[F[_], A]( - deserializer: => Resource[F, Deserializer[F, A]] - ): RecordDeserializer[F, A] = - RecordDeserializer.instance( - forKey = deserializer, - forValue = deserializer - ) - - def instance[F[_], A]( - forKey: => Resource[F, KeyDeserializer[F, A]], - forValue: => Resource[F, ValueDeserializer[F, A]] - ): RecordDeserializer[F, A] = { - def _forKey: Resource[F, KeyDeserializer[F, A]] = forKey - def _forValue: Resource[F, ValueDeserializer[F, A]] = forValue - - new RecordDeserializer[F, A] { - override def forKey: Resource[F, KeyDeserializer[F, A]] = - _forKey - - override def forValue: Resource[F, ValueDeserializer[F, A]] = - _forValue - - override def toString: String = - "Deserializer.Record$" + System.identityHashCode(this) - } - } - - implicit def lift[F[_], A](implicit deserializer: Deserializer[F, A]): RecordDeserializer[F, A] = - RecordDeserializer.const(Resource.pure(deserializer)) -} diff --git a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala index ef6408c6c..633e50eaf 100644 --- a/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ConsumerSettingsSpec.scala @@ -7,6 +7,7 @@ package fs2.kafka import cats.effect.IO +import cats.effect.kernel.Resource import cats.effect.unsafe.implicits.global import cats.syntax.all._ import org.apache.kafka.clients.consumer.ConsumerConfig @@ -273,12 +274,12 @@ final class ConsumerSettingsSpec extends BaseSpec { it("should be able to create with and without deserializer creation effects") { val deserializer = Deserializer[IO, String] - val recordDeserializer = RecordDeserializer.lift(deserializer) + val deserializerResource = Resource.pure[IO, Deserializer[IO, String]](deserializer) ConsumerSettings(deserializer, deserializer) - ConsumerSettings(recordDeserializer, deserializer) - ConsumerSettings(deserializer, recordDeserializer) - ConsumerSettings(recordDeserializer, recordDeserializer) + ConsumerSettings(deserializerResource, deserializer) + ConsumerSettings(deserializer, deserializerResource) + ConsumerSettings(deserializerResource, deserializerResource) } it("should be able to implicitly create with and without deserializer creation effects") { @@ -286,8 +287,8 @@ final class ConsumerSettingsSpec extends BaseSpec { Deserializer[IO, String] .map(identity) - implicit val deserializer: RecordDeserializer[IO, String] = - RecordDeserializer.lift(deserializerInstance) + implicit val deserializer: Resource[IO, Deserializer[IO, String]] = + Resource.pure(deserializerInstance) ConsumerSettings[IO, Int, Int] ConsumerSettings[IO, String, Int].keyDeserializer diff --git a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala index 5ded2080b..b97aa5ae4 100644 --- a/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/DeserializerSpec.scala @@ -206,8 +206,4 @@ final class DeserializerSpec extends BaseCatsSpec { test("Deserializer#toString") { assert(Deserializer[IO, String].toString startsWith "Deserializer$") } - - test("Deserializer.Record#toString") { - assert(RecordDeserializer[IO, String].toString startsWith "Deserializer.Record$") - } } diff 
--git a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala deleted file mode 100644 index 5adf7fa1e..000000000 --- a/modules/core/src/test/scala/fs2/kafka/RecordDeserializerSpec.scala +++ /dev/null @@ -1,67 +0,0 @@ -package fs2.kafka - -import cats.effect.IO - -class RecordDeserializerSpec extends BaseSpec { - - import cats.effect.unsafe.implicits.global - - describe("RecordDeserializer#transform") { - it("should transform the RecordDeserializer applying the function to inner Deserializers") { - - val strRecordDes: RecordDeserializer[IO, String] = - RecordDeserializer - .lift(Deserializer[IO, Int]) - .transform(_.map(_.toString)) - - strRecordDes.forKey - .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) - .unsafeRunSync() shouldBe "1" - } - } - - describe("RecordDeserializer#attempt") { - it( - "should transform the RecordDeserializer[F, T] to RecordDeserializer[F, Either[Throwable, T]]" - ) { - - val attemptIntRecordDes: RecordDeserializer[IO, Either[Throwable, Int]] = - RecordDeserializer - .lift(Deserializer[IO, Int].flatMap { - case 1 => Deserializer[IO, Int] - case _ => Deserializer.failWith[IO, Int]("Unsupported value") - }) - .attempt - - attemptIntRecordDes.forKey - .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) - .unsafeRunSync() shouldBe Right(1) - - attemptIntRecordDes.forKey - .use(_.deserialize("T1", Headers.empty, null)) - .unsafeRunSync() - .isLeft shouldBe true - } - } - - describe("RecordDeserializer#option") { - it("should transform the RecordDeserializer[F, T] to RecordDeserializer[F, Option[T]]") { - - val optIntRecordDes: RecordDeserializer[IO, Option[Int]] = - RecordDeserializer - .lift(Deserializer[IO, Int]) - .option - - optIntRecordDes.forKey - .use(_.deserialize("T1", Headers.empty, serializeToBytes(1))) - .unsafeRunSync() shouldBe Some(1) - - optIntRecordDes.forKey - .use(_.deserialize("T1", Headers.empty, null)) - .unsafeRunSync() shouldBe None - } - } - - private def serializeToBytes[T: Serializer[IO, *]](value: T): Array[Byte] = - Serializer[IO, T].serialize("", Headers.empty, value).unsafeRunSync() -} diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index fa7bae038..7f8184f1d 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -9,64 +9,62 @@ package fs2.kafka.vulcan import _root_.vulcan.Codec import cats.effect.Sync import cats.effect.Resource -import fs2.kafka.{Deserializer, RecordDeserializer} +import fs2.kafka.{Deserializer, KeyDeserializer, ValueDeserializer} import io.confluent.kafka.schemaregistry.avro.AvroSchema import java.nio.ByteBuffer final class AvroDeserializer[A] private[vulcan] ( private val codec: Codec[A] ) extends AnyVal { - def using[F[_]]( - settings: AvroSettings[F] - )(implicit F: Sync[F]): RecordDeserializer[F, A] = + + def forKey[F[_]: Sync](settings: AvroSettings[F]): Resource[F, KeyDeserializer[F, A]] = + createDeserializer(isKey = true, settings) + + def forValue[F[_]: Sync](settings: AvroSettings[F]): Resource[F, ValueDeserializer[F, A]] = + createDeserializer(isKey = false, settings) + private def createDeserializer[F[_]](isKey: Boolean, settings: AvroSettings[F])( + implicit F: Sync[F] + ): Resource[F, Deserializer[F, A]] = codec.schema match { case Right(schema) => - def createDeserializer(isKey: Boolean): 
Resource[F, Deserializer[F, A]] = - Resource - .make(settings.createAvroDeserializer(isKey)) { - case (deserializer, _) => F.delay(deserializer.close()) - } - .map { - case (deserializer, schemaRegistryClient) => - Deserializer.instance { (topic, _, bytes) => - F.defer { - if (bytes == null || bytes.length == 0) { - F.raiseError( - new IllegalArgumentException( - s"Invalid Avro record: bytes is null or empty" - ) + Resource + .make(settings.createAvroDeserializer(isKey)) { + case (deserializer, _) => F.delay(deserializer.close()) + } + .map { + case (deserializer, schemaRegistryClient) => + Deserializer.instance { (topic, _, bytes) => + F.defer { + if (bytes == null || bytes.length == 0) { + F.raiseError( + new IllegalArgumentException( + s"Invalid Avro record: bytes is null or empty" ) + ) - } else { - val writerSchemaId = - ByteBuffer.wrap(bytes).getInt(1) // skip magic byte + } else { + val writerSchemaId = + ByteBuffer.wrap(bytes).getInt(1) // skip magic byte - val writerSchema = { - val schema = schemaRegistryClient.getSchemaById(writerSchemaId) - if (schema.isInstanceOf[AvroSchema]) - schema.asInstanceOf[AvroSchema].rawSchema() - else - null - } + val writerSchema = { + val schema = schemaRegistryClient.getSchemaById(writerSchemaId) + if (schema.isInstanceOf[AvroSchema]) + schema.asInstanceOf[AvroSchema].rawSchema() + else + null + } - codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { - case Right(a) => F.pure(a) - case Left(error) => F.raiseError(error.throwable) - } + codec.decode(deserializer.deserialize(topic, bytes, schema), writerSchema) match { + case Right(a) => F.pure(a) + case Left(error) => F.raiseError(error.throwable) } } } - } - - RecordDeserializer.instance( - forKey = createDeserializer(true), - forValue = createDeserializer(false) - ) + } + } case Left(error) => - RecordDeserializer.const { - Resource.raiseError(error.throwable) - } + Resource.raiseError(error.throwable) } override def toString: String = diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala index 8ac0e42cf..902ffb893 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroDeserializerSpec.scala @@ -6,9 +6,9 @@ package fs2.kafka.vulcan -import cats.effect.IO +import cats.effect.{IO, Resource} import cats.effect.unsafe.implicits.global -import fs2.kafka.Headers +import fs2.kafka.{Headers, KeyDeserializer, ValueDeserializer} import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient import org.scalatest.funspec.AnyFunSpec import vulcan.Codec @@ -16,28 +16,32 @@ import vulcan.Codec final class AvroDeserializerSpec extends AnyFunSpec { describe("AvroDeserializer") { it("can create a deserializer") { - val deserializer = - AvroDeserializer[Int].using(avroSettings) + val forKey: Resource[IO, KeyDeserializer[IO, Int]] = + AvroDeserializer[Int].forKey(avroSettings) - assert(deserializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) - assert(deserializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) + val forValue: Resource[IO, ValueDeserializer[IO, Int]] = + AvroDeserializer[Int].forValue(avroSettings) + + assert(forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("raises schema errors") { val codec: Codec[BigDecimal] = Codec.decimal(-1, -1) - val deserializer = - 
avroDeserializer(codec).using(avroSettings) + val forKey = avroDeserializer(codec).forKey(avroSettings) + + val forValue = avroDeserializer(codec).forValue(avroSettings) - assert(deserializer.forKey.use(IO.pure).attempt.unsafeRunSync().isLeft) - assert(deserializer.forValue.use(IO.pure).attempt.unsafeRunSync().isLeft) + assert(forKey.use(IO.pure).attempt.unsafeRunSync().isLeft) + assert(forValue.use(IO.pure).attempt.unsafeRunSync().isLeft) } it("raises IllegalArgumentException if the data is null") { - val deserializer = AvroDeserializer[String].using(avroSettings) + val deserializer = AvroDeserializer[String].forKey(avroSettings) intercept[IllegalArgumentException] { - deserializer.forKey.use(_.deserialize("foo", Headers.empty, null)).unsafeRunSync() + deserializer.use(_.deserialize("foo", Headers.empty, null)).unsafeRunSync() } } diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index ffe3325ce..356a95093 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -26,7 +26,8 @@ final class PackageSpec extends AnyFunSpec { describe("avroDeserializer") { it("should be available given explicit settings") { - avroDeserializer[Test].using(avroSettings) + avroDeserializer[Test].forKey(avroSettings) + avroDeserializer[Test].forValue(avroSettings) } } @@ -34,7 +35,7 @@ final class PackageSpec extends AnyFunSpec { it("should be able to do roundtrip serialization") { ( avroSerializer[Either[Test, Int]].using(avroSettings).forValue, - avroDeserializer[Either[Test, Int]].using(avroSettings).forValue + avroDeserializer[Either[Test, Int]].forValue(avroSettings) ).parTupled .use { case (serializer, deserializer) => @@ -51,7 +52,7 @@ final class PackageSpec extends AnyFunSpec { it("should be able to do roundtrip serialization using compatible schemas") { ( avroSerializer[Test2].using(avroSettings).forValue, - avroDeserializer[Test].using(avroSettings).forValue + avroDeserializer[Test].forValue(avroSettings) ).parTupled .use { case (serializer, deserializer) => @@ -68,7 +69,7 @@ final class PackageSpec extends AnyFunSpec { it("should error when reader and writer schemas have mismatching logical types") { ( avroSerializer[Long].using(avroSettings).forValue, - avroDeserializer[Instant].using(avroSettings).forValue + avroDeserializer[Instant].forValue(avroSettings) ).parTupled .use { case (serializer, deserializer) => From d6310ba237a631d3927866fbe48e04fd4cfb8d59 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 12:30:42 +0100 Subject: [PATCH 133/162] Update docs --- docs/src/main/mdoc/modules.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/src/main/mdoc/modules.md b/docs/src/main/mdoc/modules.md index 6ab96bc89..08de140eb 100644 --- a/docs/src/main/mdoc/modules.md +++ b/docs/src/main/mdoc/modules.md @@ -52,14 +52,15 @@ val avroSettings = We can then create a `Serializer` and `Deserializer` instance for `Person`. 
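For context: a `Resource`-wrapped deserializer like the one defined in the snippet below can also be consumed directly with `use`, which is how the updated test suites exercise it. A minimal sketch, assuming an arbitrary topic name and raw payload; the helper name `decodeOnce` is illustrative and not part of the library:

```scala
import cats.effect.{IO, Resource}
import fs2.kafka.{Headers, ValueDeserializer}

// Acquire the deserializer, run a single deserialization, then release it.
def decodeOnce[A](
  deserializer: Resource[IO, ValueDeserializer[IO, A]],
  topic: String,
  bytes: Array[Byte]
): IO[A] =
  deserializer.use(_.deserialize(topic, Headers.empty, bytes))
```
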
```scala mdoc:silent -import fs2.kafka.{RecordDeserializer, RecordSerializer} +import cats.effect.Resource +import fs2.kafka.{RecordSerializer, ValueDeserializer} import fs2.kafka.vulcan.{avroDeserializer, avroSerializer} implicit val personSerializer: RecordSerializer[IO, Person] = avroSerializer[Person].using(avroSettings) -implicit val personDeserializer: RecordDeserializer[IO, Person] = - avroDeserializer[Person].using(avroSettings) +implicit val personDeserializer: Resource[IO, ValueDeserializer[IO, Person]] = + avroDeserializer[Person].forValue(avroSettings) ``` Finally, we can create settings, passing the `Serializer`s and `Deserializer`s implicitly. @@ -125,8 +126,8 @@ avroSettingsSharedClient.map { avroSettings => val personSerializer: RecordSerializer[IO, Person] = avroSerializer[Person].using(avroSettings) - val personDeserializer: RecordDeserializer[IO, Person] = - avroDeserializer[Person].using(avroSettings) + val personDeserializer: Resource[IO, ValueDeserializer[IO, Person]] = + avroDeserializer[Person].forValue(avroSettings) val consumerSettings = ConsumerSettings( From 1fb09d27d45fc3ea280a2281e61c2ce810f24251 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Mon, 27 Mar 2023 12:44:13 +0100 Subject: [PATCH 134/162] remove RecordSerializer --- docs/src/main/mdoc/modules.md | 10 +-- .../scala/fs2/kafka/ProducerSettings.scala | 14 ++-- .../scala/fs2/kafka/RecordSerializer.scala | 78 ------------------- .../src/main/scala/fs2/kafka/Serializer.scala | 7 +- .../fs2/kafka/ProducerSettingsSpec.scala | 14 ++-- .../fs2/kafka/RecordSerializerSpec.scala | 40 ---------- .../test/scala/fs2/kafka/SerializerSpec.scala | 4 - .../fs2/kafka/vulcan/AvroSerializer.scala | 56 ++++++------- .../fs2/kafka/vulcan/AvroSerializerSpec.scala | 19 +++-- .../scala/fs2/kafka/vulcan/PackageSpec.scala | 9 ++- 10 files changed, 67 insertions(+), 184 deletions(-) delete mode 100644 modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala delete mode 100644 modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala diff --git a/docs/src/main/mdoc/modules.md b/docs/src/main/mdoc/modules.md index 08de140eb..dbce0d1b5 100644 --- a/docs/src/main/mdoc/modules.md +++ b/docs/src/main/mdoc/modules.md @@ -53,11 +53,11 @@ We can then create a `Serializer` and `Deserializer` instance for `Person`. 
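With `RecordSerializer` removed, a plain `Serializer` is lifted into the same `Resource` shape with `Resource.pure`, which is what the updated `ProducerSettingsSpec` relies on. A minimal sketch of wiring such a lifted serializer into `ProducerSettings`; the value names and the bootstrap server address are illustrative assumptions:

```scala
import cats.effect.{IO, Resource}
import fs2.kafka.{ProducerSettings, Serializer}

// An effectless serializer lifted into the Resource shape now expected
// for both key and value serializers.
val stringSerializer: Resource[IO, Serializer[IO, String]] =
  Resource.pure(Serializer[IO, String])

val producerSettings: ProducerSettings[IO, String, String] =
  ProducerSettings(stringSerializer, stringSerializer)
    .withBootstrapServers("localhost:9092")
```
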
```scala mdoc:silent import cats.effect.Resource -import fs2.kafka.{RecordSerializer, ValueDeserializer} +import fs2.kafka.{ValueSerializer, ValueDeserializer} import fs2.kafka.vulcan.{avroDeserializer, avroSerializer} -implicit val personSerializer: RecordSerializer[IO, Person] = - avroSerializer[Person].using(avroSettings) +implicit val personSerializer: Resource[IO, ValueSerializer[IO, Person]] = + avroSerializer[Person].forValue(avroSettings) implicit val personDeserializer: Resource[IO, ValueDeserializer[IO, Person]] = avroDeserializer[Person].forValue(avroSettings) @@ -123,8 +123,8 @@ We can then create multiple `Serializer`s and `Deserializer`s using the `AvroSet ```scala mdoc:silent avroSettingsSharedClient.map { avroSettings => - val personSerializer: RecordSerializer[IO, Person] = - avroSerializer[Person].using(avroSettings) + val personSerializer: Resource[IO, ValueSerializer[IO, Person]] = + avroSerializer[Person].forValue(avroSettings) val personDeserializer: Resource[IO, ValueDeserializer[IO, Person]] = avroDeserializer[Person].forValue(avroSettings) diff --git a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala index 9e0b4b096..985990524 100644 --- a/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala +++ b/modules/core/src/main/scala/fs2/kafka/ProducerSettings.scala @@ -334,28 +334,28 @@ object ProducerSettings { ) def apply[F[_], K, V]( - keySerializer: RecordSerializer[F, K], + keySerializer: Resource[F, KeySerializer[F, K]], valueSerializer: ValueSerializer[F, V] ): ProducerSettings[F, K, V] = create( - keySerializer = keySerializer.forKey, + keySerializer = keySerializer, valueSerializer = Resource.pure(valueSerializer) ) def apply[F[_], K, V]( keySerializer: KeySerializer[F, K], - valueSerializer: RecordSerializer[F, V] + valueSerializer: Resource[F, ValueSerializer[F, V]] ): ProducerSettings[F, K, V] = create( keySerializer = Resource.pure(keySerializer), - valueSerializer = valueSerializer.forValue + valueSerializer = valueSerializer ) def apply[F[_], K, V]( - implicit keySerializer: RecordSerializer[F, K], - valueSerializer: RecordSerializer[F, V] + implicit keySerializer: Resource[F, KeySerializer[F, K]], + valueSerializer: Resource[F, ValueSerializer[F, V]] ): ProducerSettings[F, K, V] = - create(keySerializer = keySerializer.forKey, valueSerializer = valueSerializer.forValue) + create(keySerializer, valueSerializer) implicit def producerSettingsShow[F[_], K, V]: Show[ProducerSettings[F, K, V]] = Show.fromToString diff --git a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala b/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala deleted file mode 100644 index 3ae0871a7..000000000 --- a/modules/core/src/main/scala/fs2/kafka/RecordSerializer.scala +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2018-2023 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka - -import cats.effect.Resource - -/** - * Serializer which may vary depending on whether a record - * key or value is being serialized, and which may require - * a creation effect. 
- */ -sealed abstract class RecordSerializer[F[_], A] { - - def forKey: Resource[F, KeySerializer[F, A]] - - def forValue: Resource[F, ValueSerializer[F, A]] - - /** - * Returns a new [[RecordSerializer]] instance applying the mapping function to key and value serializers - */ - final def transform[B]( - f: Serializer[F, A] => Serializer[F, B] - ): RecordSerializer[F, B] = - RecordSerializer.instance( - forKey = forKey.map(f.asInstanceOf[KeySerializer[F, A] => KeySerializer[F, B]]), - forValue = forValue.map(f.asInstanceOf[ValueSerializer[F, A] => ValueSerializer[F, B]]) - ) - - /** - * Returns a new [[RecordSerializer]] instance that will serialize key and value `Some` values - * using the specific [[Serializer]], and serialize `None` as `null`. - * - * See [[Serializer.option]] for more details. - */ - final def option: RecordSerializer[F, Option[A]] = - transform(_.option) -} - -object RecordSerializer { - def apply[F[_], A]( - implicit serializer: RecordSerializer[F, A] - ): RecordSerializer[F, A] = - serializer - - def const[F[_], A]( - serializer: => Resource[F, Serializer[F, A]] - ): RecordSerializer[F, A] = - RecordSerializer.instance( - forKey = serializer, - forValue = serializer - ) - - def instance[F[_], A]( - forKey: => Resource[F, KeySerializer[F, A]], - forValue: => Resource[F, ValueSerializer[F, A]] - ): RecordSerializer[F, A] = { - def _forKey: Resource[F, KeySerializer[F, A]] = forKey - def _forValue: Resource[F, ValueSerializer[F, A]] = forValue - - new RecordSerializer[F, A] { - override def forKey: Resource[F, KeySerializer[F, A]] = - _forKey - - override def forValue: Resource[F, ValueSerializer[F, A]] = - _forValue - - override def toString: String = - "Serializer.Record$" + System.identityHashCode(this) - } - } - - implicit def lift[F[_], A](implicit serializer: Serializer[F, A]): RecordSerializer[F, A] = - RecordSerializer.const(Resource.pure(serializer)) -} diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 6b834c6bf..3ab16a460 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -7,8 +7,9 @@ package fs2.kafka import cats.Contravariant -import cats.effect.Sync +import cats.effect.{Resource, Sync} import cats.syntax.all._ + import java.nio.charset.{Charset, StandardCharsets} import java.util.UUID @@ -260,4 +261,8 @@ object GenericSerializer { implicit def uuid[F[_]](implicit F: Sync[F]): Serializer[F, UUID] = Serializer.string[F].contramap(_.toString) + + implicit def resource[T <: KeyOrValue, F[_], A]( + implicit ser: GenericSerializer[T, F, A] + ): Resource[F, GenericSerializer[T, F, A]] = Resource.pure(ser) } diff --git a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala index a520fa1c4..6e6e4f7d7 100644 --- a/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/ProducerSettingsSpec.scala @@ -6,7 +6,7 @@ package fs2.kafka -import cats.effect.IO +import cats.effect.{IO, Resource} import cats.effect.unsafe.implicits.global import cats.syntax.all._ import org.apache.kafka.clients.producer.ProducerConfig @@ -146,12 +146,12 @@ final class ProducerSettingsSpec extends BaseSpec { it("should be able to create with and without serializer creation effects") { val serializer = Serializer[IO, String] - val recordSerializer = RecordSerializer.lift(serializer) + val 
serializerResource: Resource[IO, Serializer[IO, String]] = Resource.pure(serializer) ProducerSettings(serializer, serializer) - ProducerSettings(recordSerializer, serializer) - ProducerSettings(serializer, recordSerializer) - ProducerSettings(recordSerializer, recordSerializer) + ProducerSettings(serializerResource, serializer) + ProducerSettings(serializer, serializerResource) + ProducerSettings(serializerResource, serializerResource) } it("should be able to implicitly create with and without serializer creation effects") { @@ -159,8 +159,8 @@ final class ProducerSettingsSpec extends BaseSpec { Serializer[IO, String] .mapBytes(identity) - implicit val serializer: RecordSerializer[IO, String] = - RecordSerializer.lift(serializerInstance) + implicit val serializerResource: Resource[IO, Serializer[IO, String]] = + Resource.pure(serializerInstance) ProducerSettings[IO, Int, Int] ProducerSettings[IO, String, Int].keySerializer diff --git a/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala deleted file mode 100644 index 5852a251c..000000000 --- a/modules/core/src/test/scala/fs2/kafka/RecordSerializerSpec.scala +++ /dev/null @@ -1,40 +0,0 @@ -package fs2.kafka - -import cats.effect.IO - -class RecordSerializerSpec extends BaseSpec { - - import cats.effect.unsafe.implicits.global - - describe("RecordSerializer#transform") { - it("should transform the RecordSerializer applying the function to inner Serializers") { - - val intRecordSer: RecordSerializer[IO, Int] = - RecordSerializer - .lift(Serializer[IO, String]) - .transform(_.contramap(_.toString)) - - intRecordSer.forKey - .use(_.serialize("T1", Headers.empty, 1)) - .unsafeRunSync() shouldBe "1".getBytes - } - } - - describe("RecordSerializer#option") { - it("should transform the RecordSerializer[F, T] to RecordSerializer[F, Option[T]]") { - - val optStrRecordSer: RecordSerializer[IO, Option[String]] = - RecordSerializer - .lift(Serializer[IO, String]) - .option - - optStrRecordSer.forKey - .use(_.serialize("T1", Headers.empty, Some("1"))) - .unsafeRunSync() shouldBe "1".getBytes - - optStrRecordSer.forKey - .use(_.serialize("T1", Headers.empty, None)) - .unsafeRunSync() shouldBe null - } - } -} diff --git a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala index e224b42ab..2daf71e21 100644 --- a/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/SerializerSpec.scala @@ -248,10 +248,6 @@ final class SerializerSpec extends BaseCatsSpec { assert(Serializer[IO, Int].toString startsWith "Serializer$") } - test("Serializer.Record#toString") { - assert(RecordSerializer[IO, Int].toString startsWith "Serializer.Record$") - } - def roundtrip[A: Arbitrary: Eq]( serializer: Serializer[IO, A], deserializer: Deserializer[IO, A] diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index 7112fdcd1..9bcfe4282 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -8,41 +8,41 @@ package fs2.kafka.vulcan import _root_.vulcan.Codec import cats.effect.Sync -import fs2.kafka.{RecordSerializer, Serializer} +import fs2.kafka.{KeySerializer, Serializer, ValueSerializer} import cats.effect.kernel.Resource final class AvroSerializer[A] private[vulcan] ( private val codec: 
Codec[A] ) extends AnyVal { - def using[F[_]]( - settings: AvroSettings[F] - )(implicit F: Sync[F]): RecordSerializer[F, A] = { - def createSerializer(isKey: Boolean): Resource[F, Serializer[F, A]] = - codec.schema match { - case Left(e) => Resource.pure(Serializer.fail(e.throwable)) - case Right(writerSchema) => - Resource - .make(settings.createAvroSerializer(isKey, Some(writerSchema))) { - case (ser, _) => F.delay(ser.close()) - } - .map { - case (serializer, _) => - Serializer.instance { (topic, _, a) => - F.defer { - codec.encode(a) match { - case Right(value) => F.pure(serializer.serialize(topic, value)) - case Left(error) => F.raiseError(error.throwable) - } + + def forKey[F[_]: Sync](settings: AvroSettings[F]): Resource[F, KeySerializer[F, A]] = + create(isKey = true, settings) + + def forValue[F[_]: Sync](settings: AvroSettings[F]): Resource[F, ValueSerializer[F, A]] = + create(isKey = false, settings) + + private def create[F[_]](isKey: Boolean, settings: AvroSettings[F])( + implicit F: Sync[F] + ): Resource[F, Serializer[F, A]] = + codec.schema match { + case Left(e) => Resource.pure(Serializer.fail(e.throwable)) + case Right(writerSchema) => + Resource + .make(settings.createAvroSerializer(isKey, Some(writerSchema))) { + case (ser, _) => F.delay(ser.close()) + } + .map { + case (serializer, _) => + Serializer.instance { (topic, _, a) => + F.defer { + codec.encode(a) match { + case Right(value) => F.pure(serializer.serialize(topic, value)) + case Left(error) => F.raiseError(error.throwable) } } - } - } - - RecordSerializer.instance( - forKey = createSerializer(true), - forValue = createSerializer(false) - ) - } + } + } + } override def toString: String = "AvroSerializer$" + System.identityHashCode(this) diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala index cbdf96b97..444e966bd 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/AvroSerializerSpec.scala @@ -16,17 +16,16 @@ import vulcan.Codec final class AvroSerializerSpec extends AnyFunSpec { describe("AvroSerializer") { it("can create a serializer") { - val serializer = - AvroSerializer[Int].using(avroSettings) + val forKey = AvroSerializer[Int].forKey(avroSettings) + val forValue = AvroSerializer[Int].forValue(avroSettings) - assert(serializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) - assert(serializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("auto-registers union schemas") { (avroSerializer[Either[Int, Boolean]] - .using(avroSettings) - .forValue + .forValue(avroSettings) .use( _.serialize( "test-union-topic", @@ -46,11 +45,11 @@ final class AvroSerializerSpec extends AnyFunSpec { val codec: Codec[BigDecimal] = Codec.decimal(-1, -1) - val serializer = - avroSerializer(codec).using(avroSettings) + val forKey = avroSerializer(codec).forKey(avroSettings) + val forValue = avroSerializer(codec).forKey(avroSettings) - assert(serializer.forKey.use(IO.pure).attempt.unsafeRunSync().isRight) - assert(serializer.forValue.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(forKey.use(IO.pure).attempt.unsafeRunSync().isRight) + assert(forValue.use(IO.pure).attempt.unsafeRunSync().isRight) } it("toString") { diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala 
b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index 356a95093..6d31e2f23 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -20,7 +20,8 @@ import _root_.vulcan.Codec final class PackageSpec extends AnyFunSpec { describe("avroSerializer") { it("should be available given explicit settings") { - avroSerializer[Test].using(avroSettings) + avroSerializer[Test].forKey(avroSettings) + avroSerializer[Test].forValue(avroSettings) } } @@ -34,7 +35,7 @@ final class PackageSpec extends AnyFunSpec { describe("avroSerializer/avroDeserializer") { it("should be able to do roundtrip serialization") { ( - avroSerializer[Either[Test, Int]].using(avroSettings).forValue, + avroSerializer[Either[Test, Int]].forValue(avroSettings), avroDeserializer[Either[Test, Int]].forValue(avroSettings) ).parTupled .use { @@ -51,7 +52,7 @@ final class PackageSpec extends AnyFunSpec { it("should be able to do roundtrip serialization using compatible schemas") { ( - avroSerializer[Test2].using(avroSettings).forValue, + avroSerializer[Test2].forValue(avroSettings), avroDeserializer[Test].forValue(avroSettings) ).parTupled .use { @@ -68,7 +69,7 @@ final class PackageSpec extends AnyFunSpec { it("should error when reader and writer schemas have mismatching logical types") { ( - avroSerializer[Long].using(avroSettings).forValue, + avroSerializer[Long].forValue(avroSettings), avroDeserializer[Instant].forValue(avroSettings) ).parTupled .use { From 714ff95361aa047dc8ee7549288e3a10c81806a8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 9 Apr 2023 15:03:41 +0000 Subject: [PATCH 135/162] Update sbt-mima-plugin to 1.1.2 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 345db90be..e17f0e671 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") From e74480154f5976a5f21ad5d02a3e58bf095843d8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 9 Apr 2023 15:03:52 +0000 Subject: [PATCH 136/162] Update kafka-avro-serializer to 7.3.3 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 9c828e09e..f65445330 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.4.8" val catsVersion = "2.6.1" -val confluentVersion = "7.3.2" +val confluentVersion = "7.3.3" val fs2Version = "3.6.1" From 26cdc781e8dde6ad53d4e4219af246761042d3d8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 9 Apr 2023 15:04:09 +0000 Subject: [PATCH 137/162] Update kafka-clients to 3.4.0 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 9c828e09e..7656d5380 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.3.2" val fs2Version = "3.6.1" -val kafkaVersion = "3.3.2" +val kafkaVersion = "3.4.0" val testcontainersScalaVersion = "0.40.14" From 5e72b1388b133f169cdfebe72c7af442911c8537 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Tue, 
11 Apr 2023 14:18:55 +0100 Subject: [PATCH 138/162] Post-3.0 release build updates --- README.md | 10 +++++----- build.sbt | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index ea90eba24..a232ff66a 100644 --- a/README.md +++ b/README.md @@ -9,11 +9,11 @@ For the latest stable version, see the badge at the top of this page. If your project uses cats-effect 2 you should instead use the latest release from the 1.x series. ### Version matrix -|fs2-kafka|scala|cats-effect/fs2|kafka-clients| -|---------|-----|---------------|-------------| -|3.x (milestones)|2.13, 3.1+|3.x|3.x| -|2.x|2.12, 2.13, 3.1+|3.x|2.x| -|1.x|2.12, 2.13, 3.0+|2.x|2.x| +|fs2-kafka| scala |cats-effect/fs2|kafka-clients| +|---------|------------------|---------------|-------------| +|3.x| 2.12, 2.13, 3.2+ |3.x|3.x| +|2.x| 2.12, 2.13, 3.2+ |3.x|2.x| +|1.x| 2.12, 2.13, 3.0+ |2.x|2.x| For further details, see the [microsite](https://fd4s.github.io/fs2-kafka/docs/overview). diff --git a/build.sbt b/build.sbt index 7ca72e6b9..c09f62251 100644 --- a/build.sbt +++ b/build.sbt @@ -215,7 +215,7 @@ ThisBuild / githubWorkflowPublishTargetBranches := ThisBuild / githubWorkflowPublish := Seq( WorkflowStep.Sbt( - List("tlRelease"), // For 3.0 release: List("tlRelease", "docs/docusaurusPublishGhpages"), + List("tlRelease", "docs/docusaurusPublishGhpages"), env = Map( "GIT_DEPLOY_KEY" -> "${{ secrets.GIT_DEPLOY_KEY }}", "PGP_PASSPHRASE" -> "${{ secrets.PGP_PASSPHRASE }}", @@ -347,7 +347,7 @@ addCommandsAlias( List( "+clean", "+test", - //"+mimaReportBinaryIssues", + "+mimaReportBinaryIssues", "+scalafmtCheck", "scalafmtSbtCheck", "+headerCheck", @@ -361,7 +361,7 @@ addCommandsAlias( List( "clean", "test", - // "mimaReportBinaryIssues", + "mimaReportBinaryIssues", "scalafmtCheck", "scalafmtSbtCheck", "headerCheck", From 567cb13a60901e0f26857589054954b1a0f7dcd0 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Tue, 11 Apr 2023 14:31:51 +0100 Subject: [PATCH 139/162] Update GH workflow --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c95ee814d..ef098e193 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -171,4 +171,4 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt '++ ${{ matrix.scala }}' tlRelease + run: sbt '++ ${{ matrix.scala }}' tlRelease docs/docusaurusPublishGhpages From 36dde863bac50b8a55182ead4d6cd4d22c36e1e3 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Wed, 12 Apr 2023 11:19:18 +0100 Subject: [PATCH 140/162] Update version matrix --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a232ff66a..45043d21f 100644 --- a/README.md +++ b/README.md @@ -9,11 +9,11 @@ For the latest stable version, see the badge at the top of this page. If your project uses cats-effect 2 you should instead use the latest release from the 1.x series. 
### Version matrix -|fs2-kafka| scala |cats-effect/fs2|kafka-clients| -|---------|------------------|---------------|-------------| -|3.x| 2.12, 2.13, 3.2+ |3.x|3.x| -|2.x| 2.12, 2.13, 3.2+ |3.x|2.x| -|1.x| 2.12, 2.13, 3.0+ |2.x|2.x| +|fs2-kafka| scala |cats-effect/fs2|kafka-clients|status| +|---------|------------------|---------------|-------------|------| +|3.x| 2.12, 2.13, 3.2+ |3.x|3.x|Current| +|2.x| 2.12, 2.13, 3.2+ |3.x|2.x|Deprecated| +|1.x| 2.12, 2.13, 3.2+ |2.x|2.x|End-of-life| For further details, see the [microsite](https://fd4s.github.io/fs2-kafka/docs/overview). From 2841d5cb0167339db7f084a7ed7e603a47a5d55f Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 23 Apr 2023 15:05:53 +0000 Subject: [PATCH 141/162] Update sbt-typelevel to 0.4.20 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e17f0e671..4eccbc430 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.19") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.20") From d3ffad6fa339a1384f36e9d7e52f1ae64d6ca2cf Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 23 Apr 2023 15:07:36 +0000 Subject: [PATCH 142/162] Run prePR with sbt-typelevel Executed command: sbt tlPrePrBotHook --- .github/workflows/ci.yml | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c95ee814d..d0ff8cdfa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,3 +172,37 @@ jobs: SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} run: sbt '++ ${{ matrix.scala }}' tlRelease + + validate-steward: + name: Validate Steward Config + strategy: + matrix: + os: [ubuntu-latest] + scala: [2.13.6] + java: [temurin@11] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout current branch (fast) + uses: actions/checkout@v3 + + - name: Download Java (temurin@17) + id: download-java-temurin-17 + if: matrix.java == 'temurin@17' + uses: typelevel/download-java@v2 + with: + distribution: temurin + java-version: 17 + + - name: Setup Java (temurin@17) + if: matrix.java == 'temurin@17' + uses: actions/setup-java@v3 + with: + distribution: jdkfile + java-version: 17 + jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} + + - uses: coursier/setup-action@v1 + with: + apps: scala-steward + + - run: scala-steward validate-repo-config .scala-steward.conf From 0bc8146d976525458632079cbf109c3aef97f221 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 25 Apr 2023 14:49:34 +0000 Subject: [PATCH 143/162] Revert commit(s) d3ffad6f, 2841d5cb --- .github/workflows/ci.yml | 34 ---------------------------------- project/plugins.sbt | 2 +- 2 files changed, 1 insertion(+), 35 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d0ff8cdfa..c95ee814d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,37 +172,3 @@ jobs: SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} run: sbt '++ ${{ matrix.scala }}' tlRelease - - validate-steward: - name: Validate Steward Config - strategy: - matrix: - os: [ubuntu-latest] - scala: 
[2.13.6] - java: [temurin@11] - runs-on: ${{ matrix.os }} - steps: - - name: Checkout current branch (fast) - uses: actions/checkout@v3 - - - name: Download Java (temurin@17) - id: download-java-temurin-17 - if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v2 - with: - distribution: temurin - java-version: 17 - - - name: Setup Java (temurin@17) - if: matrix.java == 'temurin@17' - uses: actions/setup-java@v3 - with: - distribution: jdkfile - java-version: 17 - jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} - - - uses: coursier/setup-action@v1 - with: - apps: scala-steward - - - run: scala-steward validate-repo-config .scala-steward.conf diff --git a/project/plugins.sbt b/project/plugins.sbt index 4eccbc430..e17f0e671 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.20") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.19") From f5fbe5e8c26b24037d29b66bb0b974f0d09b4130 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 25 Apr 2023 14:49:45 +0000 Subject: [PATCH 144/162] Update sbt-typelevel to 0.4.20 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e17f0e671..4eccbc430 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.19") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.20") From 1d97b0f60c37ad1789b3cfe4fde5a17d0aac2743 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 25 Apr 2023 14:51:22 +0000 Subject: [PATCH 145/162] Run prePR with sbt-typelevel Executed command: sbt tlPrePrBotHook --- .github/workflows/ci.yml | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ef098e193..4ebb4faf5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -172,3 +172,37 @@ jobs: SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} run: sbt '++ ${{ matrix.scala }}' tlRelease docs/docusaurusPublishGhpages + + validate-steward: + name: Validate Steward Config + strategy: + matrix: + os: [ubuntu-latest] + scala: [2.13.6] + java: [temurin@11] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout current branch (fast) + uses: actions/checkout@v3 + + - name: Download Java (temurin@17) + id: download-java-temurin-17 + if: matrix.java == 'temurin@17' + uses: typelevel/download-java@v2 + with: + distribution: temurin + java-version: 17 + + - name: Setup Java (temurin@17) + if: matrix.java == 'temurin@17' + uses: actions/setup-java@v3 + with: + distribution: jdkfile + java-version: 17 + jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} + + - uses: coursier/setup-action@v1 + with: + apps: scala-steward + + - run: scala-steward validate-repo-config .scala-steward.conf From ab2a5054f372ceef5c4a6577978f2c1898394c6c Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 5 May 2023 16:07:11 +0100 Subject: 
[PATCH 146/162] Fix merge issue --- .../src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index b98b991ce..30e51ae1b 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -103,7 +103,7 @@ private[kafka] final class KafkaConsumerActor[F[_], K, V]( private[this] def manualCommitSync(request: Request.ManualCommitSync[F]): F[Unit] = { val commit = - withConsumer.blocking(_.commitSync(request.offsets.asJava, settings.commitTimeout.asJava)) + withConsumer.blocking(_.commitSync(request.offsets.asJava, settings.commitTimeout.toJava)) commit.attempt >>= request.callback } From 436b00b474eb924ce42aa64b10ad099e7ca2b1ad Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 5 May 2023 16:09:13 +0100 Subject: [PATCH 147/162] Scalafmt --- .../core/src/main/scala/fs2/kafka/KafkaAdminClient.scala | 1 - .../src/main/scala/fs2/kafka/KafkaProducerConnection.scala | 1 - modules/core/src/main/scala/fs2/kafka/Serializer.scala | 1 - modules/core/src/main/scala/fs2/kafka/package.scala | 6 ------ .../core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala | 1 - .../src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala | 1 - .../src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala | 1 - .../src/test/scala/fs2/kafka/vulcan/PackageSpec.scala | 1 - 8 files changed, 13 deletions(-) diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala index 0ba9d4947..01020ee29 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaAdminClient.scala @@ -330,7 +330,6 @@ object KafkaAdminClient { partitions: G[TopicPartition] ): ListConsumerGroupOffsetsForPartitions[F] = new ListConsumerGroupOffsetsForPartitions[F] { - private[this] val groupOffsets = Map( groupId -> new ListConsumerGroupOffsetsSpec().topicPartitions(partitions.asJava) ).asJava diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala index 3483134e7..0147a3357 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducerConnection.scala @@ -145,7 +145,6 @@ object KafkaProducerConnection { override def partitionsFor(topic: String): G[List[PartitionInfo]] = withProducer.blocking { _.partitionsFor(topic).asScala.toList } - } } diff --git a/modules/core/src/main/scala/fs2/kafka/Serializer.scala b/modules/core/src/main/scala/fs2/kafka/Serializer.scala index 9ebdd5d07..2c2a55e42 100644 --- a/modules/core/src/main/scala/fs2/kafka/Serializer.scala +++ b/modules/core/src/main/scala/fs2/kafka/Serializer.scala @@ -52,7 +52,6 @@ sealed abstract class GenericSerializer[-T <: KeyOrValue, F[_], A] { * support for effect types. */ object GenericSerializer { - def apply[F[_], A](implicit serializer: Serializer[F, A]): Serializer[F, A] = serializer /** Alias for [[Serializer#identity]]. 
*/ diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 9d66bcd4d..6f5999b8e 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -80,7 +80,6 @@ package object kafka { } package kafka { - /** Phantom types to indicate whether a [[Serializer]]/[[Deserializer]] if for keys, values, or both */ sealed trait KeyOrValue @@ -88,11 +87,9 @@ package kafka { sealed trait Value extends KeyOrValue } package kafka { - import cats.Foldable object ProducerRecords { - def apply[F[+_], K, V]( records: F[ProducerRecord[K, V]] )( @@ -101,11 +98,9 @@ package kafka { def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = Chunk.singleton(record) - } object TransactionalProducerRecords { - @deprecated("this is now an identity operation", "3.0.0-M5") def apply[F[_], K, V]( chunk: Chunk[CommittableProducerRecords[F, K, V]] @@ -119,6 +114,5 @@ package kafka { record: CommittableProducerRecords[F, K, V] ): TransactionalProducerRecords[F, K, V] = Chunk.singleton(record) - } } diff --git a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala index e835e1547..dc0ac58ad 100644 --- a/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/internal/SyntaxSpec.scala @@ -19,7 +19,6 @@ import org.apache.kafka.common.internals.KafkaFutureImpl import java.util.concurrent.CancellationException final class SyntaxSpec extends BaseSpec { - describe("Map#filterKeysStrictValuesList") { it("should be the same as toList.collect") { forAll { (m: Map[Int, Int], p: Int => Boolean) => diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala index 1289803f5..ec1eb3e1d 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroDeserializer.scala @@ -16,7 +16,6 @@ import java.nio.ByteBuffer final class AvroDeserializer[A] private[vulcan] ( private val codec: Codec[A] ) extends AnyVal { - def forKey[F[_]: Sync](settings: AvroSettings[F]): Resource[F, KeyDeserializer[F, A]] = createDeserializer(isKey = true, settings) diff --git a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala index 9bcfe4282..89b9c2482 100644 --- a/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala +++ b/modules/vulcan/src/main/scala/fs2/kafka/vulcan/AvroSerializer.scala @@ -14,7 +14,6 @@ import cats.effect.kernel.Resource final class AvroSerializer[A] private[vulcan] ( private val codec: Codec[A] ) extends AnyVal { - def forKey[F[_]: Sync](settings: AvroSettings[F]): Resource[F, KeySerializer[F, A]] = create(isKey = true, settings) diff --git a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala index 6d31e2f23..4c07d9faf 100644 --- a/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala +++ b/modules/vulcan/src/test/scala/fs2/kafka/vulcan/PackageSpec.scala @@ -59,7 +59,6 @@ final class PackageSpec extends AnyFunSpec { case (serializer, deserializer) => val test2 = Test2("test", 42) for { - serialized <- serializer.serialize("topic2", Headers.empty, test2) deserialized <- deserializer.deserialize("topic2", Headers.empty, 
serialized) } yield assert(deserialized == Test("test")) From f5fa6dcd1f66b05a254878a00fce7dd2102bd0b6 Mon Sep 17 00:00:00 2001 From: Ben Plommer Date: Fri, 5 May 2023 16:28:06 +0100 Subject: [PATCH 148/162] Mima exclusion --- build.sbt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/build.sbt b/build.sbt index b25b00b58..a8921bf72 100644 --- a/build.sbt +++ b/build.sbt @@ -263,6 +263,15 @@ lazy val publishSettings = ) ) +ThisBuild / mimaBinaryIssueFilters ++= { + import com.typesafe.tools.mima.core._ + // format: off + Seq( + ProblemFilters.exclude[Problem]("fs2.kafka.internal.*") + ) + // format: on +} + lazy val noMimaSettings = Seq(mimaPreviousArtifacts := Set()) lazy val noPublishSettings = From 5c788a4e05811f38a83ebb4e3d7a6c4cafbc31e4 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 27 May 2023 17:51:22 +0000 Subject: [PATCH 149/162] Update sbt to 1.8.3 in series/3.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f344c1483..ef3d26620 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.8.2 +sbt.version = 1.8.3 From de3c8097fab561c24a5ac5037bf489fd4995d5f4 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 12 Jun 2023 15:08:54 +0000 Subject: [PATCH 150/162] Update kafka-clients to 3.4.1 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a8921bf72..90d4f1a78 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ val confluentVersion = "7.3.3" val fs2Version = "3.6.1" -val kafkaVersion = "3.4.0" +val kafkaVersion = "3.4.1" val testcontainersScalaVersion = "0.40.15" From 9383955dbbb4de0b047d3ec89c319e05adfbcc78 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 14 Jul 2023 17:34:19 +0000 Subject: [PATCH 151/162] Update kafka-avro-serializer to 7.3.4 in series/3.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a8921bf72..44d31de88 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,7 @@ val catsEffectVersion = "3.4.9" val catsVersion = "2.6.1" -val confluentVersion = "7.3.3" +val confluentVersion = "7.3.4" val fs2Version = "3.6.1" From dc345d3d4242f789c33a734e912b9d92c87c0a03 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 30 Jul 2023 14:55:02 +0000 Subject: [PATCH 152/162] Update sbt-mima-plugin to 1.1.3 in series/3.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4eccbc430..ffc2b0428 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") From f69bc2050de6241126b3b139c023a6e50d0db5b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Abecasis?= Date: Wed, 6 Sep 2023 18:23:37 +0100 Subject: [PATCH 153/162] Retire FakeFiber FakeFiber was used by KafkaConsumer to manage: 1. the consumerActor fiber, processing requests (including polls); 2. the pollScheduler fiber, scheduling poll requests (subject to backpressure); 3. a fiber combining the above two fibers. 
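As a rough, hypothetical illustration (cats-effect 3 `IO`, made-up loop bodies; not code from this change), the race-based combination described below boils down to something like:

```scala
import cats.effect.{IO, IOApp}
import scala.concurrent.duration._

object RaceSketch extends IOApp.Simple {

  // Hypothetical stand-ins for the two loops: handling requests/polls and scheduling polls.
  val handleRequests: IO[Unit] = IO.println("handle one request or poll") >> IO.sleep(100.millis)
  val schedulePoll: IO[Unit]   = IO.println("schedule a poll") >> IO.sleep(50.millis)

  // Racing the two repeating effects yields a single background effect: it runs
  // until either loop fails or the race itself is canceled, and canceling it
  // cancels both loops.
  val background: IO[Unit] =
    IO.race(handleRequests.foreverM, schedulePoll.foreverM).void

  // Start in the background, cancel on shutdown.
  val run: IO[Unit] =
    background.start.flatMap(fiber => IO.sleep(1.second) >> fiber.cancel)
}
```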
Compared to a regular fiber, FakeFiber offered a method to combine two fibers by racing them one against the other. The semantics were similar to the race method for effects, but operating at the fiber level. In KafkaConsumer, FakeFiber was used with cancellation effects that returned immediately (i.e., fiber.cancel.start.void). In addition the fiber outcome was relayed to KafkaConsumer.awaitTermination. With this change FakeFiber is replaced with an Async[F].race of the underlying effects. This is managed in the new method startBackgroundConsumer, which builds upon effects assembled by runConsumerActor and runPollScheduler. These effects are unwrapped from the previous fibers. As before, cancellation of the consumer effect is only waited on in awaitTermination, where any errors are propagated. Compared to the original behaviour of FakeFiber.combine, starting with cats-effect 3.5.0, cancellation of one of the consumer effects will lead to cancellation of both, as per changes introduced with https://github.com/typelevel/cats-effect/pull/3453. I'm not sure how cancelation would come into play here, but the behaviour change looks appropriate: without one of the racing fibers KafkaConsumer would not be functional (no polls scheduled, or no requests/polls processed). --- .../main/scala/fs2/kafka/KafkaConsumer.scala | 79 +++++++++++-------- .../scala/fs2/kafka/internal/FakeFiber.scala | 40 ---------- 2 files changed, 48 insertions(+), 71 deletions(-) delete mode 100644 modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala index 3691697fa..9c6e02f2d 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaConsumer.scala @@ -79,47 +79,65 @@ sealed abstract class KafkaConsumer[F[_], K, V] with KafkaConsumerLifecycle[F] object KafkaConsumer { - private def spawnRepeating[F[_]: Concurrent, A](fa: F[A]): Resource[F, FakeFiber[F]] = - Resource.make { - Deferred[F, Either[Throwable, Unit]].flatMap { deferred => - fa.foreverM[Unit] - .guaranteeCase { - case Outcome.Errored(e) => deferred.complete(Left(e)).void - case _ => deferred.complete(Right(())).void - } - .start - .map(fiber => FakeFiber(deferred.get.rethrow, fiber.cancel.start.void)) - } - }(_.cancel) - - private def startConsumerActor[F[_], K, V]( + /** + * Processes requests from the queue, if there are pending requests, otherwise waits for the next poll. + * + * In particular, any newly queued requests may wait for up to pollInterval, and for the next poll to complete. + * + * The resulting effect runs forever, until canceled. + */ + private def runConsumerActor[F[_], K, V]( requests: QueueSource[F, Request[F, K, V]], polls: QueueSource[F, Request.Poll[F]], actor: KafkaConsumerActor[F, K, V] )( implicit F: Async[F] - ): Resource[F, FakeFiber[F]] = - spawnRepeating { - OptionT(requests.tryTake) - .getOrElseF(polls.take.widen) - .flatMap(actor.handle(_)) - } + ): F[Unit] = + OptionT(requests.tryTake) + .getOrElseF(polls.take.widen) + .flatMap(actor.handle(_)) + .foreverM[Unit] - private def startPollScheduler[F[_], K, V]( + /** + * Schedules polls every pollInterval to be handled by runConsumerActor. + * + * The polls queue is assumed bounded to provide backpressure. + * + * The resulting effect runs forever, until canceled. 
+ */ + private def runPollScheduler[F[_], K, V]( polls: QueueSink[F, Request.Poll[F]], pollInterval: FiniteDuration )( implicit F: Temporal[F] - ): Resource[F, FakeFiber[F]] = - spawnRepeating { - polls.offer(Request.poll) >> F.sleep(pollInterval) - } + ): F[Unit] = + polls + .offer(Request.poll) + .andWait(pollInterval) + .foreverM[Unit] + + private def startBackgroundConsumer[F[_], K, V]( + requests: QueueSource[F, Request[F, K, V]], + polls: Queue[F, Request.Poll[F]], + actor: KafkaConsumerActor[F, K, V], + pollInterval: FiniteDuration + )( + implicit F: Async[F] + ): Resource[F, Fiber[F, Throwable, Unit]] = + Resource.make { + F.race( + runConsumerActor(requests, polls, actor), + runPollScheduler(polls, pollInterval) + ) + .void + .start + }(_.cancel.start.void) private def createKafkaConsumer[F[_], K, V]( requests: QueueSink[F, Request[F, K, V]], settings: ConsumerSettings[F, K, V], actor: KafkaConsumerActor[F, K, V], - fiber: FakeFiber[F], + fiber: Fiber[F, Throwable, Unit], streamIdRef: Ref[F, StreamId], id: Int, withConsumer: WithConsumer[F], @@ -602,9 +620,9 @@ object KafkaConsumer { override def toString: String = "KafkaConsumer$" + id - override def terminate: F[Unit] = fiber.cancel + override def terminate: F[Unit] = fiber.cancel.start.void - override def awaitTermination: F[Unit] = fiber.join + override def awaitTermination: F[Unit] = fiber.joinWithUnit } /** @@ -651,13 +669,12 @@ object KafkaConsumer { withConsumer = withConsumer ) } - actorFiber <- startConsumerActor(requests, polls, actor) - polls <- startPollScheduler(polls, settings.pollInterval) + fiber <- startBackgroundConsumer(requests, polls, actor, settings.pollInterval) } yield createKafkaConsumer( requests, settings, actor, - actorFiber.combine(polls), + fiber, streamId, id, withConsumer, diff --git a/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala b/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala deleted file mode 100644 index 0031640ec..000000000 --- a/modules/core/src/main/scala/fs2/kafka/internal/FakeFiber.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright 2018-2023 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package fs2.kafka.internal - -import cats.effect.{Concurrent, Outcome} -import cats.syntax.all._ -import cats.effect.syntax.all._ - -/** A wrapper for `cancel` and `join` effects used to terminate and await - * termination of running processes, ported from `Fiber` in cats-effect 2. 
- */ -private[kafka] final case class FakeFiber[F[_]](join: F[Unit], cancel: F[Unit])( - implicit F: Concurrent[F] -) { - def combine(that: FakeFiber[F]): FakeFiber[F] = { - val fa0join = - this.join.guaranteeCase { - case Outcome.Canceled() => F.unit - case _ => that.cancel - } - - val fb0join = - that.join.guaranteeCase { - case Outcome.Canceled() => F.unit - case _ => this.cancel - } - - FakeFiber( - F.racePair(fa0join, fb0join).flatMap { - case Left((a, fiberB)) => F.map2(a.embedNever, fiberB.joinWithNever)((_, _) => ()) - case Right((fiberA, b)) => F.map2(fiberA.joinWithNever, b.embedNever)((_, _) => ()) - }, - F.map2(this.cancel, that.cancel)((_, _) => ()) - ) - } -} From a48e60d7c3b4d28da70840577a5c17817754af17 Mon Sep 17 00:00:00 2001 From: Peter Hazell Date: Fri, 11 Aug 2023 10:10:23 +0100 Subject: [PATCH 154/162] Remove VerifiableProperties issue workaround --- .../main/scala-3/kafka.util/VerifiableProperties.scala | 10 ---------- 1 file changed, 10 deletions(-) delete mode 100644 modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala diff --git a/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala b/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala deleted file mode 100644 index 39a34fbdd..000000000 --- a/modules/vulcan/src/main/scala-3/kafka.util/VerifiableProperties.scala +++ /dev/null @@ -1,10 +0,0 @@ -/* - * Copyright 2018-2023 OVO Energy Limited - * - * SPDX-License-Identifier: Apache-2.0 - */ - -package kafka.utils - -// Workaround for https://github.com/lampepfl/dotty/issues/13523 and https://github.com/confluentinc/schema-registry/issues/553 -private class VerifiableProperties From 00d0b6184e404f5a96fed5262fd0cdd03aa0c4a1 Mon Sep 17 00:00:00 2001 From: "a.artigao" Date: Sat, 23 Sep 2023 09:00:52 +0200 Subject: [PATCH 155/162] Remove workaround fixed in SBT 1.3 --- project/PackagingTypePlugin.scala | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 project/PackagingTypePlugin.scala diff --git a/project/PackagingTypePlugin.scala b/project/PackagingTypePlugin.scala deleted file mode 100644 index d06916c9b..000000000 --- a/project/PackagingTypePlugin.scala +++ /dev/null @@ -1,9 +0,0 @@ -import sbt._ - -// workaround from https://github.com/sbt/sbt/issues/3618#issuecomment-424924293 -object PackagingTypePlugin extends AutoPlugin { - override val buildSettings = { - sys.props += "packaging.type" -> "jar" - Nil - } -} From 35b76d89d53d78708d6b3f905373d87d89ebfbfd Mon Sep 17 00:00:00 2001 From: "a.artigao" Date: Sat, 23 Sep 2023 10:05:45 +0200 Subject: [PATCH 156/162] SBT 1.9.x --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index ef3d26620..303541e50 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 1.8.3 +sbt.version = 1.9.6 From caa041129c3fcdd0623c131314a71fda6606d2f7 Mon Sep 17 00:00:00 2001 From: "a.artigao" Date: Sat, 23 Sep 2023 10:21:26 +0200 Subject: [PATCH 157/162] Fix MiMa missing class problem --- build.sbt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 8bf1d4691..60a973bc8 100644 --- a/build.sbt +++ b/build.sbt @@ -267,7 +267,8 @@ ThisBuild / mimaBinaryIssueFilters ++= { import com.typesafe.tools.mima.core._ // format: off Seq( - ProblemFilters.exclude[Problem]("fs2.kafka.internal.*") + ProblemFilters.exclude[Problem]("fs2.kafka.internal.*"), + 
ProblemFilters.exclude[MissingClassProblem]("kafka.utils.VerifiableProperties") ) // format: on } From 0dceaddf9ca733fd16ab129ccd8e03fbe7f3e292 Mon Sep 17 00:00:00 2001 From: "a.artigao" Date: Sun, 24 Sep 2023 10:15:19 +0200 Subject: [PATCH 158/162] Bump CE and fs2 to the latest version --- .github/workflows/ci.yml | 4 ++-- .scala-steward.conf | 12 ------------ build.sbt | 6 +++--- .../fs2/kafka/CommittableProducerRecords.scala | 2 +- .../main/scala/fs2/kafka/KafkaProducer.scala | 17 ++++++----------- .../fs2/kafka/internal/KafkaConsumerActor.scala | 4 ++-- .../core/src/main/scala/fs2/kafka/package.scala | 2 +- .../scala/fs2/kafka/KafkaProducerSpec.scala | 4 ++-- .../src/test/scala/fs2/kafka/KafkaSpec.scala | 2 +- .../kafka/TransactionalKafkaProducerSpec.scala | 10 +++++----- 10 files changed, 23 insertions(+), 40 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4ebb4faf5..e19843d28 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,12 +28,12 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.12.17, 2.13.10, 3.2.2] + scala: [2.12.17, 2.13.10, 3.3.1] java: [temurin@8, temurin@17] exclude: - scala: 2.12.17 java: temurin@17 - - scala: 3.2.2 + - scala: 3.3.1 java: temurin@17 runs-on: ${{ matrix.os }} steps: diff --git a/.scala-steward.conf b/.scala-steward.conf index 8e3e92e57..d6ef8e254 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -1,18 +1,6 @@ pullRequests.frequency = "14 days" updates.ignore = [{ - groupId = "org.typelevel", - artifactId="cats-effect" -},{ - groupId = "org.typelevel", - artifactId="cats-effect-laws" -},{ - groupId = "org.typelevel", - artifactId="cats-effect-testkit" -},{ - groupId = "co.fs2", - artifactId="fs2-core" -},{ groupId = "com.dimafeng" },{ groupId = "org.scalameta", diff --git a/build.sbt b/build.sbt index 60a973bc8..88b6ff55d 100644 --- a/build.sbt +++ b/build.sbt @@ -1,10 +1,10 @@ -val catsEffectVersion = "3.4.9" +val catsEffectVersion = "3.5.1" val catsVersion = "2.6.1" val confluentVersion = "7.3.4" -val fs2Version = "3.6.1" +val fs2Version = "3.9.2" val kafkaVersion = "3.4.1" @@ -18,7 +18,7 @@ val scala212 = "2.12.17" val scala213 = "2.13.10" -val scala3 = "3.2.2" +val scala3 = "3.3.1" ThisBuild / tlBaseVersion := "3.0" diff --git a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala index 652aefc18..28226d34c 100644 --- a/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala +++ b/modules/core/src/main/scala/fs2/kafka/CommittableProducerRecords.scala @@ -58,7 +58,7 @@ object CommittableProducerRecords { records: G[ProducerRecord[K, V]], offset: CommittableOffset[F] )(implicit G: Foldable[G]): CommittableProducerRecords[F, K, V] = - chunk(Chunk.iterable(Foldable[G].toIterable(records)), offset) + chunk(Chunk.from(Foldable[G].toIterable(records)), offset) /** * Creates a new [[CommittableProducerRecords]] for producing exactly diff --git a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala index d62fce43c..01b769a9a 100644 --- a/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala +++ b/modules/core/src/main/scala/fs2/kafka/KafkaProducer.scala @@ -197,17 +197,12 @@ object KafkaProducer { else promise.failure(exception) } ) - }.as { - F.delay(promise.future).flatMap { fut => - F.executionContext.flatMap { implicit ec => - F.async[(ProducerRecord[K, V], RecordMetadata)] { cb => - 
F.delay(fut.onComplete(t => cb(t.toEither))).as(Some(F.unit)) - } - } - } - // TODO: replace the above with the following once CE3.5.0 is out - // F.fromFutureCancelable(F.delay(promise.future)) - } + }.map( + javaFuture => + F.fromFutureCancelable( + F.delay((promise.future, F.delay(javaFuture.cancel(true)).void)) + ) + ) } } diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala index 30e51ae1b..183f885c8 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala @@ -152,7 +152,7 @@ private[kafka] final class KafkaConsumerActor[F[_], K, V]( val action = st.fetches.filterKeysStrictList(withRecords).traverse { case (partition, partitionFetches) => - val records = Chunk.vector(st.records(partition).toVector) + val records = Chunk.from(st.records(partition).toVector) partitionFetches.values.toList.traverse(_.completeRevoked(records)) } >> logging.log( RevokedFetchesWithRecords(st.records.filterKeysStrict(withRecords), newState) @@ -317,7 +317,7 @@ private[kafka] final class KafkaConsumerActor[F[_], K, V]( def completeFetches: F[Unit] = state.fetches.filterKeysStrictList(canBeCompleted).traverse_ { case (partition, fetches) => - val records = Chunk.vector(allRecords(partition).toVector) + val records = Chunk.from(allRecords(partition).toVector) fetches.values.toList.traverse_(_.completeRecords(records)) } diff --git a/modules/core/src/main/scala/fs2/kafka/package.scala b/modules/core/src/main/scala/fs2/kafka/package.scala index 6f5999b8e..f2b94436c 100644 --- a/modules/core/src/main/scala/fs2/kafka/package.scala +++ b/modules/core/src/main/scala/fs2/kafka/package.scala @@ -94,7 +94,7 @@ package kafka { records: F[ProducerRecord[K, V]] )( implicit F: Traverse[F] - ): ProducerRecords[K, V] = Chunk.iterable(Foldable[F].toIterable(records)) + ): ProducerRecords[K, V] = Chunk.from(Foldable[F].toIterable(records)) def one[K, V](record: ProducerRecord[K, V]): ProducerRecords[K, V] = Chunk.singleton(record) diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala index 4aefa5fc4..caeb3384f 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaProducerSpec.scala @@ -36,7 +36,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer.stream(producerSettings[IO]) _ <- Stream.eval(IO(producer.toString should startWith("KafkaProducer$"))) - (records, passthrough) <- Stream.chunk(Chunk.seq(toProduce).map { + (records, passthrough) <- Stream.chunk(Chunk.from(toProduce).map { case passthrough @ (key, value) => (ProducerRecords.one(ProducerRecord(topic, key, value)), passthrough) }) @@ -63,7 +63,7 @@ final class KafkaProducerSpec extends BaseKafkaSpec { (for { producer <- KafkaProducer[IO].stream(producerSettings[IO]) - records <- Stream.chunk(Chunk.seq(toProduce).map { + records <- Stream.chunk(Chunk.from(toProduce).map { case (key, value) => ProducerRecords.one(ProducerRecord(topic, key, value)) }) diff --git a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala index d67ea5b0b..0bfe1a00c 100644 --- a/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/KafkaSpec.scala @@ -23,7 +23,7 @@ final class KafkaSpec extends 
BaseAsyncSpec { (for { ref <- Stream.eval(Ref[IO].of(Option.empty[Map[TopicPartition, OffsetAndMetadata]])) commit = (offsets: Map[TopicPartition, OffsetAndMetadata]) => ref.set(Some(offsets)) - offsets = Chunk.seq(exampleOffsets(commit)) + offsets = Chunk.from(exampleOffsets(commit)) _ <- Stream .chunk(offsets) .covary[IO] diff --git a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala index 1d1c8a1ed..db93f3dc2 100644 --- a/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala +++ b/modules/core/src/test/scala/fs2/kafka/TransactionalKafkaProducerSpec.scala @@ -79,7 +79,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { ) ) _ <- Stream.eval(IO(producer.toString should startWith("TransactionalKafkaProducer$"))) - (records, passthrough) <- Stream.chunk(Chunk.seq(toProduce)).zipWithIndex.map { + (records, passthrough) <- Stream.chunk(Chunk.from(toProduce)).zipWithIndex.map { case ((key, value), i) => val record = ProducerRecord(topic, key, value) @@ -185,7 +185,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { _ => IO.unit ) - records = Chunk.seq(0 to 100).map(i => CommittableProducerRecords(Chunk.empty, offsets(i))) + records = Chunk.from(0 to 100).map(i => CommittableProducerRecords(Chunk.empty, offsets(i))) results <- Stream.eval(producer.produce(records)) } yield { @@ -198,7 +198,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { private def testMultiple(topic: String, makeOffset: Option[Int => CommittableOffset[IO]]) = { createCustomTopic(topic, partitions = 3) val toProduce = - Chunk.seq((0 to 100).toList.map(n => s"key-$n" -> s"value-$n")) + Chunk.from((0 to 100).toList.map(n => s"key-$n" -> s"value-$n")) val toPassthrough = "passthrough" @@ -262,7 +262,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { withTopic { topic => createCustomTopic(topic, partitions = 3) val toProduce = - Chunk.seq((0 to 1000000).toList.map(n => s"key-$n" -> s"value-$n")) + Chunk.from((0 to 1000000).toList.map(n => s"key-$n" -> s"value-$n")) val result = (for { @@ -367,7 +367,7 @@ class TransactionalKafkaProducerSpec extends BaseKafkaSpec with EitherValues { _ => IO.unit ) } - records = Chunk.seq(recordsToProduce.zip(offsets)).map { + records = Chunk.from(recordsToProduce.zip(offsets)).map { case (record, offset) => CommittableProducerRecords.chunk( Chunk.singleton(record), From b82a5cdbec95993a541ba84aaa704653076eb26d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alan=20Artigao=20Carre=C3=B1o?= Date: Fri, 29 Sep 2023 20:26:47 +0200 Subject: [PATCH 159/162] Mark `series/2.x` as EOL (#1248) * Mark series/2.x as EOL * Missing dot in `README.md` --- .github/workflows/notes.yml | 11 ----------- README.md | 9 +++++---- docs/src/main/mdoc/certificates.md | 2 +- 3 files changed, 6 insertions(+), 16 deletions(-) delete mode 100644 .github/workflows/notes.yml diff --git a/.github/workflows/notes.yml b/.github/workflows/notes.yml deleted file mode 100644 index ef9df1846..000000000 --- a/.github/workflows/notes.yml +++ /dev/null @@ -1,11 +0,0 @@ -name: notes -on: - push: - branches: - - series/1.x -jobs: - update: - runs-on: ubuntu-latest - steps: - - run: echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> $GITHUB_ENV - - uses: release-drafter/release-drafter@v5 diff --git a/README.md b/README.md index 45043d21f..ddc070b50 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 
@@ [![Latest version](https://index.scala-lang.org/fd4s/fs2-kafka/fs2-kafka/latest.svg?style=flat)](https://index.scala-lang.org/fd4s/fs2-kafka/fs2-kafka) ![License](https://img.shields.io/github/license/fd4s/fs2-kafka) -[![Gitter](https://img.shields.io/gitter/room/fd4s/fs2-kafka)](https://gitter.im/fd4s/fs2-kafka) +[![Discord](https://img.shields.io/discord/632277896739946517.svg?label=&logo=discord&logoColor=ffffff&color=404244&labelColor=6A7EC2)](https://discord.com/channels/632277896739946517/908001544052563979) ## Getting started @@ -11,8 +11,8 @@ For the latest stable version, see the badge at the top of this page. If your pr ### Version matrix |fs2-kafka| scala |cats-effect/fs2|kafka-clients|status| |---------|------------------|---------------|-------------|------| -|3.x| 2.12, 2.13, 3.2+ |3.x|3.x|Current| -|2.x| 2.12, 2.13, 3.2+ |3.x|2.x|Deprecated| +|3.x| 2.12, 2.13, 3.3+ |3.x|3.x|Current| +|2.x| 2.12, 2.13, 3.2+ |3.x|2.x|End-of-life| |1.x| 2.12, 2.13, 3.2+ |2.x|2.x|End-of-life| For further details, see the [microsite](https://fd4s.github.io/fs2-kafka/docs/overview). @@ -23,13 +23,14 @@ For further details, see the [microsite](https://fd4s.github.io/fs2-kafka/docs/o ## Contributing -Pull requests and feedback are welcome. Pull requests should usually target the `series/2.x` branch +Pull requests and feedback are welcome. Pull requests should usually target the `series/3.x` branch. ## Adopters FS2-Kafka is used in production by a number of companies, including: - [ITV](https://github.com/itv) - [OVO Energy](https://github.com/ovotech) +- [Stuart](https://github.com/StuartApp) ## Code of conduct diff --git a/docs/src/main/mdoc/certificates.md b/docs/src/main/mdoc/certificates.md index b2bd9520f..c80026ac4 100644 --- a/docs/src/main/mdoc/certificates.md +++ b/docs/src/main/mdoc/certificates.md @@ -5,7 +5,7 @@ title: Security & Certificates ## Security: certificates, trust stores, and passwords -The `KafkaCredentialStore` can be used to create the necessary trust stores and passwords to access kafka. +The `KafkaCredentialStore` can be used to create the necessary trust stores and passwords to access Kafka. The parameters passed in are string representations of the client private key, client certificate and service certificate. 
the `properties` field in `KafkaCredentialStore` can then be applied to From ab37b194a92290727c704e493a8221e312ca63a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alan=20Artigao=20Carre=C3=B1o?= Date: Fri, 29 Sep 2023 20:27:51 +0200 Subject: [PATCH 160/162] Update sbt-typelevel to 0.5.3 in series/3.x (#1247) * Update sbt-typelevel to 0.5.3 in series/3.x * Exclude some Scala 3 compiler options added in 0.5.3 --- .github/workflows/ci.yml | 176 +++++++++--------- build.sbt | 3 + .../scala/fs2/kafka/internal/syntax.scala | 2 +- project/plugins.sbt | 2 +- 4 files changed, 96 insertions(+), 87 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e19843d28..ad96e5f81 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,76 +15,59 @@ on: tags: [v*] env: - PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} - SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} - SONATYPE_CREDENTIAL_HOST: ${{ secrets.SONATYPE_CREDENTIAL_HOST }} - SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} - PGP_SECRET: ${{ secrets.PGP_SECRET }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + +concurrency: + group: ${{ github.workflow }} @ ${{ github.ref }} + cancel-in-progress: true + jobs: build: name: Build and Test strategy: matrix: os: [ubuntu-latest] - scala: [2.12.17, 2.13.10, 3.3.1] + scala: [2.12, 2.13, 3] java: [temurin@8, temurin@17] exclude: - - scala: 2.12.17 + - scala: 2.12 java: temurin@17 - - scala: 3.3.1 + - scala: 3 java: temurin@17 runs-on: ${{ matrix.os }} + timeout-minutes: 60 steps: - name: Checkout current branch (full) - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Download Java (temurin@8) - id: download-java-temurin-8 - if: matrix.java == 'temurin@8' - uses: typelevel/download-java@v2 - with: - distribution: temurin - java-version: 8 - - name: Setup Java (temurin@8) + id: setup-java-temurin-8 if: matrix.java == 'temurin@8' uses: actions/setup-java@v3 with: - distribution: jdkfile + distribution: temurin java-version: 8 - jdkFile: ${{ steps.download-java-temurin-8.outputs.jdkFile }} + cache: sbt - - name: Download Java (temurin@17) - id: download-java-temurin-17 - if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v2 - with: - distribution: temurin - java-version: 17 + - name: sbt update + if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' + run: sbt +update - name: Setup Java (temurin@17) + id: setup-java-temurin-17 if: matrix.java == 'temurin@17' uses: actions/setup-java@v3 with: - distribution: jdkfile + distribution: temurin java-version: 17 - jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} + cache: sbt - - name: Cache sbt - uses: actions/cache@v3 - with: - path: | - ~/.sbt - ~/.ivy2/cache - ~/.coursier/cache/v1 - ~/.cache/coursier/v1 - ~/AppData/Local/Coursier/Cache/v1 - ~/Library/Caches/Coursier/v1 - key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + - name: sbt update + if: matrix.java == 'temurin@17' && steps.setup-java-temurin-17.outputs.cache-hit == 'false' + run: sbt +update - name: Check that workflows are up to date run: sbt githubWorkflowCheck @@ -101,65 +84,52 @@ jobs: strategy: matrix: os: [ubuntu-latest] - scala: [2.13.10] java: [temurin@8] runs-on: ${{ matrix.os }} steps: - name: Checkout current branch (full) - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Download Java (temurin@8) - id: download-java-temurin-8 - if: matrix.java == 
'temurin@8' - uses: typelevel/download-java@v2 - with: - distribution: temurin - java-version: 8 - - name: Setup Java (temurin@8) + id: setup-java-temurin-8 if: matrix.java == 'temurin@8' uses: actions/setup-java@v3 with: - distribution: jdkfile + distribution: temurin java-version: 8 - jdkFile: ${{ steps.download-java-temurin-8.outputs.jdkFile }} + cache: sbt - - name: Download Java (temurin@17) - id: download-java-temurin-17 - if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v2 - with: - distribution: temurin - java-version: 17 + - name: sbt update + if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' + run: sbt +update - name: Setup Java (temurin@17) + id: setup-java-temurin-17 if: matrix.java == 'temurin@17' uses: actions/setup-java@v3 with: - distribution: jdkfile + distribution: temurin java-version: 17 - jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} + cache: sbt - - name: Cache sbt - uses: actions/cache@v3 - with: - path: | - ~/.sbt - ~/.ivy2/cache - ~/.coursier/cache/v1 - ~/.cache/coursier/v1 - ~/AppData/Local/Coursier/Cache/v1 - ~/Library/Caches/Coursier/v1 - key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + - name: sbt update + if: matrix.java == 'temurin@17' && steps.setup-java-temurin-17.outputs.cache-hit == 'false' + run: sbt +update - name: Import signing key if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE == '' + env: + PGP_SECRET: ${{ secrets.PGP_SECRET }} + PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} run: echo $PGP_SECRET | base64 -di | gpg --import - name: Import signing key and strip passphrase if: env.PGP_SECRET != '' && env.PGP_PASSPHRASE != '' + env: + PGP_SECRET: ${{ secrets.PGP_SECRET }} + PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} run: | echo "$PGP_SECRET" | base64 -di > /tmp/signing-key.gpg echo "$PGP_PASSPHRASE" | gpg --pinentry-mode loopback --passphrase-fd 0 --import /tmp/signing-key.gpg @@ -171,35 +141,71 @@ jobs: GIT_DEPLOY_KEY: ${{ secrets.GIT_DEPLOY_KEY }} SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} PGP_SECRET: ${{ secrets.PGP_SECRET }} - run: sbt '++ ${{ matrix.scala }}' tlRelease docs/docusaurusPublishGhpages + run: sbt tlRelease docs/docusaurusPublishGhpages - validate-steward: - name: Validate Steward Config + dependency-submission: + name: Submit Dependencies + if: github.event_name != 'pull_request' strategy: matrix: os: [ubuntu-latest] - scala: [2.13.6] - java: [temurin@11] + java: [temurin@8] runs-on: ${{ matrix.os }} steps: - - name: Checkout current branch (fast) - uses: actions/checkout@v3 + - name: Checkout current branch (full) + uses: actions/checkout@v4 + with: + fetch-depth: 0 - - name: Download Java (temurin@17) - id: download-java-temurin-17 - if: matrix.java == 'temurin@17' - uses: typelevel/download-java@v2 + - name: Setup Java (temurin@8) + id: setup-java-temurin-8 + if: matrix.java == 'temurin@8' + uses: actions/setup-java@v3 with: distribution: temurin - java-version: 17 + java-version: 8 + cache: sbt + + - name: sbt update + if: matrix.java == 'temurin@8' && steps.setup-java-temurin-8.outputs.cache-hit == 'false' + run: sbt +update - name: Setup Java (temurin@17) + id: setup-java-temurin-17 if: matrix.java == 'temurin@17' uses: actions/setup-java@v3 with: - distribution: jdkfile + distribution: temurin java-version: 17 - jdkFile: ${{ steps.download-java-temurin-17.outputs.jdkFile }} + cache: sbt + + - name: sbt update + if: matrix.java == 'temurin@17' && 
steps.setup-java-temurin-17.outputs.cache-hit == 'false' + run: sbt +update + + - name: Submit Dependencies + uses: scalacenter/sbt-dependency-submission@v2 + with: + configs-ignore: test scala-tool scala-doc-tool test-internal + + validate-steward: + name: Validate Steward Config + strategy: + matrix: + os: [ubuntu-latest] + java: [temurin@11] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout current branch (fast) + uses: actions/checkout@v4 + + - name: Setup Java (temurin@11) + id: setup-java-temurin-11 + if: matrix.java == 'temurin@11' + uses: actions/setup-java@v3 + with: + distribution: temurin + java-version: 11 - uses: coursier/setup-action@v1 with: diff --git a/build.sbt b/build.sbt index 88b6ff55d..bc31662f0 100644 --- a/build.sbt +++ b/build.sbt @@ -287,6 +287,9 @@ ThisBuild / crossScalaVersions := Seq(scala212, scala213, scala3) lazy val scalaSettings = Seq( Compile / doc / scalacOptions += "-nowarn", // workaround for https://github.com/scala/bug/issues/12007 but also suppresses genunine problems Compile / console / scalacOptions --= Seq("-Xlint", "-Ywarn-unused"), + Compile / compile / scalacOptions --= { + if (tlIsScala3.value) Seq("-Wvalue-discard", "-Wunused:privates") else Seq.empty + }, Test / console / scalacOptions := (Compile / console / scalacOptions).value, Compile / unmanagedSourceDirectories ++= Seq( diff --git a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala index f1a179f72..d54fe476e 100644 --- a/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala +++ b/modules/core/src/main/scala/fs2/kafka/internal/syntax.scala @@ -11,7 +11,6 @@ import cats.effect.Async import cats.syntax.all._ import fs2.kafka.{Header, Headers, KafkaHeaders} import fs2.kafka.internal.converters.unsafeWrapArray -import fs2.kafka.internal.converters.collection._ import java.time.Duration import java.time.temporal.ChronoUnit @@ -125,6 +124,7 @@ private[kafka] object syntax { implicit final class MapWrappedValueSyntax[F[_], K, V]( private val map: Map[K, F[V]] ) extends AnyVal { + import fs2.kafka.internal.converters.collection._ def asJavaMap(implicit F: Foldable[F]): util.Map[K, util.Collection[V]] = map.map { case (k, fv) => k -> (fv.asJava: util.Collection[V]) }.asJava } diff --git a/project/plugins.sbt b/project/plugins.sbt index ffc2b0428..1bfd0d13e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,4 +4,4 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.9.0") addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.7") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.4.20") +addSbtPlugin("org.typelevel" % "sbt-typelevel" % "0.5.3") From 34c6e9affb70100b725d8ae46111ac6bf75b5b68 Mon Sep 17 00:00:00 2001 From: Dany Yanev Date: Fri, 29 Sep 2023 21:52:39 +0300 Subject: [PATCH 161/162] Updated docs on transactions (#1201) Co-authored-by: Dany Yanev --- docs/src/main/mdoc/transactions.md | 57 ++++++++++++++++++++---------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/docs/src/main/mdoc/transactions.md b/docs/src/main/mdoc/transactions.md index 179fc0300..fb1930119 100644 --- a/docs/src/main/mdoc/transactions.md +++ b/docs/src/main/mdoc/transactions.md @@ -5,14 +5,18 @@ title: Transactions Kafka transactions are supported through a [`TransactionalKafkaProducer`][transactionalkafkaproducer]. In order to use transactions, the following steps should be taken. 
For details on [consumers](consumers.md) and [producers](producers.md), see the respective sections. -- Create a `TransactionalProducerSettings` specifying the transactional ID. +- Create `KafkaConsumer` then split its stream into sub-streams - one for each topic. - Use `withIsolationLevel(IsolationLevel.ReadCommitted)` on `ConsumerSettings`. -- Use `TransactionalKafkaProducer.stream` to create a producer with support for transactions. +- Create `TransactionalKafkaProducer` for each sub-stream with `TransactionalProducerSettings` to create a producer with support for transactions with **partition unique** transaction id. Kafka requires partition unique transactional ids for producer "handover" and zombie fencing. + +- Use `.withEnableIdempotence(true)` and `.withRetries(n)` where `n > 0` on `ProducerSettings` - Create `CommittableProducerRecords` and wrap them in `TransactionalProducerRecords`. +- Combine all sub-streams into one stream. + > Note that calls to `produce` are sequenced in the `TransactionalKafkaProducer` to ensure that, when used concurrently, transactions don't run into each other resulting in an invalid transaction transition exception. > > Because the `TransactionalKafkaProducer` waits for the record batch to be flushed and the transaction committed on the broker, this could lead to performance bottlenecks where a single producer is shared among many threads. @@ -22,7 +26,10 @@ Following is an example where transactions are used to consume, process, produce ```scala mdoc import cats.effect.{IO, IOApp} +import fs2.Stream import fs2.kafka._ +import org.apache.kafka.common.TopicPartition + import scala.concurrent.duration._ object Main extends IOApp.Simple { @@ -37,33 +44,45 @@ object Main extends IOApp.Simple { .withBootstrapServers("localhost:9092") .withGroupId("group") - val producerSettings = + def producerSettings(partition: TopicPartition) = TransactionalProducerSettings( - "transactional-id", + s"transactional-id-$partition", ProducerSettings[IO, String, String] .withBootstrapServers("localhost:9092") + .withEnableIdempotence(true) + .withRetries(10) ) - val stream = - TransactionalKafkaProducer.stream(producerSettings) - .flatMap { producer => - KafkaConsumer.stream(consumerSettings) - .subscribeTo("topic") - .stream - .mapAsync(25) { committable => - processRecord(committable.record) - .map { case (key, value) => - val record = ProducerRecord("topic", key, value) - CommittableProducerRecords.one(record, committable.offset) + KafkaConsumer + .stream(consumerSettings) + .subscribeTo("topic") + .flatMap(_.partitionsMapStream) + .map( + _.map { + case (partition, stream) => + TransactionalKafkaProducer.stream(producerSettings(partition)).flatMap { producer => + stream + .mapAsync(25) { committable => + processRecord(committable.record) + .map { + case (key, value) => + val record = ProducerRecord("topic", key, value) + CommittableProducerRecords.one(record, committable.offset) + } } + .groupWithin(500, 15.seconds) + .evalMap(producer.produce) } - .groupWithin(500, 15.seconds) - .evalMap(producer.produce) } - - stream.compile.drain + ) + .flatMap { partitionsMap => + Stream.emits(partitionsMap.toVector).parJoinUnbounded + } + .compile + .drain } } + ``` [transactionalkafkaproducer]: @API_BASE_URL@/TransactionalKafkaProducer.html From 0774c7619b8d6fdf8afb2f60c6bdbe5603e74ebc Mon Sep 17 00:00:00 2001 From: Danila Matveev Date: Fri, 29 Sep 2023 23:11:00 +0400 Subject: [PATCH 162/162] Make instantiation of `CommitTimeoutException` lazy (#1150) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Alan Artigao Carreño
---
 .../main/scala/fs2/kafka/internal/KafkaConsumerActor.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala
index 183f885c8..8f6863ee2 100644
--- a/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala
+++ b/modules/core/src/main/scala/fs2/kafka/internal/KafkaConsumerActor.scala
@@ -115,12 +115,12 @@ private[kafka] final class KafkaConsumerActor[F[_], K, V](
       F.async[Unit] { (cb: Either[Throwable, Unit] => Unit) =>
           k(cb).as(Some(F.unit))
         }
-        .timeoutTo(settings.commitTimeout, F.raiseError[Unit] {
+        .timeoutTo(settings.commitTimeout, F.defer(F.raiseError[Unit] {
           CommitTimeoutException(
             settings.commitTimeout,
             offsets
           )
-        })
+        }))

   private[this] def manualCommitAsync(request: Request.ManualCommitAsync[F]): F[Unit] = {
     val commit = runCommitAsync(request.offsets) { cb =>
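The `F.defer` wrapper matters because `F.raiseError` takes an already-constructed exception: without it, a `CommitTimeoutException` (and its stack trace) is built every time the timeout fallback is described, even when the timeout never fires; with it, construction is postponed until the fallback actually runs. A minimal, self-contained sketch of the difference, using cats-effect `IO` and a made-up `commitTimeout()` stand-in (not code from this repository):

```scala
import cats.effect.{IO, IOApp}
import scala.concurrent.duration._

object DeferSketch extends IOApp.Simple {

  // Stand-in for CommitTimeoutException; construction is made observable.
  def commitTimeout(): Throwable = {
    println("building the exception")
    new RuntimeException("offset commit timed out")
  }

  // Eager: the exception is built as soon as the fallback value is constructed,
  // even if the timeout never fires.
  def eagerFallback: IO[Unit] = IO.raiseError(commitTimeout())

  // Lazy: IO.defer postpones building the exception until the fallback actually runs.
  def lazyFallback: IO[Unit] = IO.defer(IO.raiseError(commitTimeout()))

  val run: IO[Unit] =
    for {
      _ <- IO.sleep(10.millis).timeoutTo(1.second, lazyFallback)  // prints nothing
      _ <- IO.sleep(10.millis).timeoutTo(1.second, eagerFallback) // prints once, though the timeout never fires
    } yield ()
}
```

Running the sketch, only the second `timeoutTo` line prints "building the exception", even though neither timeout fires.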