Skip to content

Commit cd75c5f

Browse files
Authored commit: Merge pull request #2 from kaizen-solutions/feature/readme
Improvements
2 parents 793cd17 + 2e9a6d9 commit cd75c5f

File tree

5 files changed

+40
-9
lines changed

5 files changed

+40
-9
lines changed

README.md

Lines changed: 36 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,45 @@ This functionality is backed by the following libraries:
1313
- [FS2 kafka](https://github.com/fd4s/fs2-kafka)
1414
- [Confluent Schema Registry](https://github.com/confluentinc/schema-registry)
1515

16-
### Usage ###
16+
### Usage
1717

1818
Add the following to your `build.sbt`
1919
```sbt
2020
resolvers ++= Seq("confluent" at "https://packages.confluent.io/maven")
2121
libraryDependencies += "io.kaizen-solutions" %% "fs2-kafka-jsonschema" % "<latest-version>"
2222
```
23+
24+
### Example
25+
26+
Define the datatype that you would like to send/receive over Kafka via the JSON + JSON Schema format. You do this by defining your datatype and providing a `Pickler` instance for it.
27+
The `Pickler` instance comes from the Tapir library.
28+
29+
```scala
30+
import sttp.tapir.Schema.annotations.*
31+
import sttp.tapir.json.pickler.*
32+
33+
final case class Book(
34+
@description("name of the book") name: String,
35+
@description("international standard book number") isbn: Int
36+
)
37+
object Book:
38+
given Pickler[Book] = Pickler.derived
39+
```
40+
41+
Next, you can create a fs2 Kafka `Serializer` and `Deserializer` for this datatype and use it when building your FS2 Kafka producer/consumer.
42+
43+
```scala
44+
import io.kaizensolutions.jsonschema.*
45+
import cats.effect.*
46+
import fs2.kafka.*
47+
48+
def bookSerializer[F[_]: Sync]: Resource[F, ValueSerializer[F, Book]] =
49+
JsonSchemaSerializerSettings.default
50+
.withSchemaRegistryUrl("http://localhost:8081")
51+
.forValue[F, Book]
52+
53+
def bookDeserializer[F[_]: Sync]: Resource[F, ValueDeserializer[F, Book]] =
54+
JsonSchemaDeserializerSettings.default
55+
.withSchemaRegistryUrl("http://localhost:8081")
56+
.forValue[F, Book]
57+
```

build.sbt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,10 @@ inThisBuild {
1414
ScalacOptions.unchecked,
1515
ScalacOptions.deprecation,
1616
ScalacOptions.warnValueDiscard,
17+
ScalacOptions.warnUnusedImports,
1718
ScalacOptions.warnDeadCode,
19+
ScalacOptions.warnUnusedImplicits,
20+
ScalacOptions.warnUnusedExplicits,
1821
ScalacOptions.release("17"),
1922
ScalacOptions.privateKindProjector
2023
)

src/main/scala/io/kaizensolutions/jsonschema/JsonSchemaSerializer.scala

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,9 @@ import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient
99
import io.confluent.kafka.schemaregistry.json.{JsonSchema, JsonSchemaUtils}
1010
import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializer
1111
import sttp.apispec.circe.*
12-
import sttp.apispec.{ExampleSingleValue, SchemaType}
1312
import sttp.tapir.docs.apispec.schema.*
1413
import sttp.tapir.json.pickler.Pickler
1514

16-
import scala.jdk.CollectionConverters.*
17-
1815
private[jsonschema] object JsonSchemaSerializer:
1916
def create[F[_], A](
2017
isKey: Boolean,

src/main/scala/io/kaizensolutions/jsonschema/JsonSchemaSerializerSettings.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ package io.kaizensolutions.jsonschema
22
import cats.effect.{Resource, Sync}
33
import com.fasterxml.jackson.databind.ObjectMapper
44
import fs2.kafka.*
5-
import io.circe.generic.auto
65
import io.confluent.kafka.schemaregistry.SchemaProvider
76
import io.confluent.kafka.schemaregistry.client.{CachedSchemaRegistryClient, SchemaRegistryClient}
87
import io.confluent.kafka.schemaregistry.json.{JsonSchemaProvider, SpecificationVersion}

src/test/scala/io/kaizensolutions/jsonschema/JsonSchemaSerDesSpec.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,18 +4,15 @@ import cats.effect.*
44
import cats.syntax.all.*
55
import fs2.Stream
66
import fs2.kafka.*
7-
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException
87
import io.confluent.kafka.schemaregistry.client.{CachedSchemaRegistryClient, SchemaRegistryClient}
98
import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider
109
import io.confluent.kafka.schemaregistry.{CompatibilityLevel, SchemaProvider}
1110
import io.github.embeddedkafka.schemaregistry.*
1211
import org.apache.kafka.common.errors.{InvalidConfigurationException, SerializationException as UnderlyingSerializationException}
13-
import sttp.tapir.Schema.annotations.*
1412
import sttp.tapir.json.pickler.Pickler
1513
import weaver.*
1614

17-
import java.io.File
18-
import scala.concurrent.duration.DurationInt
15+
import scala.concurrent.duration.*
1916
import scala.jdk.CollectionConverters.*
2017

2118
object JsonSchemaSerdesSpec extends IOSuite:

0 commit comments

Comments (0)