diff --git a/Makefile b/Makefile index c086653b6..e6dba3961 100644 --- a/Makefile +++ b/Makefile @@ -50,17 +50,19 @@ release_package: java_package docu_html: docu_htmlclean docu_check mkdir -p documentation/html $(CP) -vrL documentation/book/images documentation/html/images - asciidoctor -v --failure-level WARN -t -dbook -a ProductVersion=$(RELEASE_VERSION) documentation/book/bridge.adoc -o documentation/html/bridge.html + # override snippetDir for asciidoctor to get the right path to the snippets + asciidoctor -v --failure-level WARN -t -dbook -a ProductVersion=$(RELEASE_VERSION) -a snippetDir=${PWD}/documentation/book/api/snippet/ documentation/book/bridge.adoc -o documentation/html/bridge.html .PHONY: docu_htmlnoheader docu_htmlnoheader: docu_htmlnoheaderclean docu_check mkdir -p documentation/htmlnoheader $(CP) -vrL documentation/book/images documentation/htmlnoheader/images - asciidoctor -v --failure-level WARN -t -dbook -a ProductVersion=$(RELEASE_VERSION) -s documentation/book/bridge.adoc -o documentation/htmlnoheader/bridge.html + # override snippetDir for asciidoctor to get the right path to the snippets + asciidoctor -v --failure-level WARN -t -dbook -a ProductVersion=$(RELEASE_VERSION) -a snippetDir=${PWD}/documentation/book/api/snippet/ -s documentation/book/bridge.adoc -o documentation/htmlnoheader/bridge.html .PHONY: docu_api docu_api: - mvn $(MVN_ARGS) -P apidoc io.github.swagger2markup:swagger2markup-maven-plugin:convertSwagger2markup@generate-apidoc + mvn $(MVN_ARGS) -P apidoc org.openapitools:openapi-generator-maven-plugin:generate@generate-apidoc .PHONY: docu_check docu_check: docu_api diff --git a/documentation/book/api/.openapi-generator-ignore b/documentation/book/api/.openapi-generator-ignore new file mode 100644 index 000000000..7484ee590 --- /dev/null +++ b/documentation/book/api/.openapi-generator-ignore @@ -0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). 
+# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/documentation/book/api/.openapi-generator/FILES b/documentation/book/api/.openapi-generator/FILES new file mode 100644 index 000000000..94a153d20 --- /dev/null +++ b/documentation/book/api/.openapi-generator/FILES @@ -0,0 +1 @@ +index.adoc diff --git a/documentation/book/api/.openapi-generator/VERSION b/documentation/book/api/.openapi-generator/VERSION new file mode 100644 index 000000000..09a6d3084 --- /dev/null +++ b/documentation/book/api/.openapi-generator/VERSION @@ -0,0 +1 @@ +7.8.0 diff --git a/documentation/book/api/.openapi-generator/openapi.json-generate-apidoc.sha256 b/documentation/book/api/.openapi-generator/openapi.json-generate-apidoc.sha256 new file mode 100644 index 000000000..ec32758c4 --- /dev/null +++ b/documentation/book/api/.openapi-generator/openapi.json-generate-apidoc.sha256 @@ -0,0 +1 @@ +e4a70d99cdac5e8dddb45ce30c39817fa9402463074478e0fd3a05d6a7f849d5 \ No newline at end of file diff --git a/documentation/book/api/definitions.adoc b/documentation/book/api/definitions.adoc deleted file mode 100644 index a4183f533..000000000 --- a/documentation/book/api/definitions.adoc +++ /dev/null @@ -1,321 +0,0 @@ - -[[_definitions]] -== Definitions - -[[_assignedtopicpartitions]] -=== AssignedTopicPartitions -__Type__ : < string, < integer (int32) > array > map - - -[[_bridgeinfo]] -=== BridgeInfo -Information about Kafka Bridge instance. - - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**bridge_version** + -__optional__|string -|=== - - -[[_consumer]] -=== Consumer - -[options="header", cols=".^3a,.^11a,.^4a"] -|=== -|Name|Description|Schema -|**auto.offset.reset** + -__optional__|Resets the offset position for the consumer. If set to `latest` (default), messages are read from the latest offset. If set to `earliest`, messages are read from the first offset.|string -|**consumer.request.timeout.ms** + -__optional__|Sets the maximum amount of time, in milliseconds, for the consumer to wait for messages for a request. If the timeout period is reached without a response, an error is returned. Default is `30000` (30 seconds).|integer -|**enable.auto.commit** + -__optional__|If set to `true` (default), message offsets are committed automatically for the consumer. If set to `false`, message offsets must be committed manually.|boolean -|**fetch.min.bytes** + -__optional__|Sets the minimum amount of data, in bytes, for the consumer to receive. The broker waits until the data to send exceeds this amount. Default is `1` byte.|integer -|**format** + -__optional__|The allowable message format for the consumer, which can be `binary` (default) or `json`. The messages are converted into a JSON format.|string -|**isolation.level** + -__optional__|If set to `read_uncommitted` (default), all transaction records are retrieved, indpendent of any transaction outcome. If set to `read_committed`, the records from committed transactions are retrieved.|string -|**name** + -__optional__|The unique name for the consumer instance. The name is unique within the scope of the consumer group. The name is used in URLs. 
If a name is not specified, a randomly generated name is assigned.|string -|=== - - -[[_consumerrecord]] -=== ConsumerRecord - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**headers** + -__optional__|<<_kafkaheaderlist,KafkaHeaderList>> -|**offset** + -__optional__|integer (int64) -|**partition** + -__optional__|integer (int32) -|**timestamp** + -__optional__|integer (int64) -|**topic** + -__optional__|string -|=== - - -[[_consumerrecordlist]] -=== ConsumerRecordList -__Type__ : < <<_consumerrecord,ConsumerRecord>> > array - - -[[_createdconsumer]] -=== CreatedConsumer - -[options="header", cols=".^3a,.^11a,.^4a"] -|=== -|Name|Description|Schema -|**base_uri** + -__optional__|Base URI used to construct URIs for subsequent requests against this consumer instance.|string -|**instance_id** + -__optional__|Unique ID for the consumer instance in the group.|string -|=== - - -[[_error]] -=== Error - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**error_code** + -__optional__|integer (int32) -|**message** + -__optional__|string -|=== - - -[[_kafkaheader]] -=== KafkaHeader - -[options="header", cols=".^3a,.^11a,.^4a"] -|=== -|Name|Description|Schema -|**key** + -__required__||string -|**value** + -__required__|The header value in binary format, base64-encoded + -**Pattern** : `"^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==\|[A-Za-z0-9+/]{3}=)?$"`|string (byte) -|=== - - -[[_kafkaheaderlist]] -=== KafkaHeaderList -__Type__ : < <<_kafkaheader,KafkaHeader>> > array - - -[[_offsetcommitseek]] -=== OffsetCommitSeek - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**offset** + -__required__|integer (int64) -|**partition** + -__required__|integer (int32) -|**topic** + -__required__|string -|=== - - -[[_offsetcommitseeklist]] -=== OffsetCommitSeekList - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**offsets** + -__optional__|< <<_offsetcommitseek,OffsetCommitSeek>> > array -|=== - - -[[_offsetrecordsent]] -=== OffsetRecordSent - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**offset** + -__optional__|integer (int64) -|**partition** + -__optional__|integer (int32) -|=== - - -[[_offsetrecordsentlist]] -=== OffsetRecordSentList - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**offsets** + -__optional__|< <<_offsetrecordsent,OffsetRecordSent>> > array -|=== - - -[[_offsetssummary]] -=== OffsetsSummary - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**beginning_offset** + -__optional__|integer (int64) -|**end_offset** + -__optional__|integer (int64) -|=== - - -[[_partition]] -=== Partition - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**partition** + -__optional__|integer (int32) -|**topic** + -__optional__|string -|=== - - -[[_partitionmetadata]] -=== PartitionMetadata - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**leader** + -__optional__|integer (int32) -|**partition** + -__optional__|integer (int32) -|**replicas** + -__optional__|< <<_replica,Replica>> > array -|=== - - -[[_partitions]] -=== Partitions - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**partitions** + -__optional__|< <<_partition,Partition>> > array -|=== - - -[[_producerrecord]] -=== ProducerRecord - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**headers** + -__optional__|<<_kafkaheaderlist,KafkaHeaderList>> -|**partition** + -__optional__|integer (int32) -|**timestamp** + -__optional__|integer (int64) -|=== - - -[[_producerrecordlist]] -=== ProducerRecordList - -[options="header", 
cols=".^3a,.^4a"] -|=== -|Name|Schema -|**records** + -__optional__|< <<_producerrecord,ProducerRecord>> > array -|=== - - -[[_producerrecordtopartition]] -=== ProducerRecordToPartition - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**headers** + -__optional__|<<_kafkaheaderlist,KafkaHeaderList>> -|=== - - -[[_producerrecordtopartitionlist]] -=== ProducerRecordToPartitionList - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**records** + -__optional__|< <<_producerrecordtopartition,ProducerRecordToPartition>> > array -|=== - - -[[_replica]] -=== Replica - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**broker** + -__optional__|integer (int32) -|**in_sync** + -__optional__|boolean -|**leader** + -__optional__|boolean -|=== - - -[[_subscribedtopiclist]] -=== SubscribedTopicList - -[options="header", cols=".^3a,.^4a"] -|=== -|Name|Schema -|**partitions** + -__optional__|< <<_assignedtopicpartitions,AssignedTopicPartitions>> > array -|**topics** + -__optional__|<<_topics,Topics>> -|=== - - -[[_topicmetadata]] -=== TopicMetadata - -[options="header", cols=".^3a,.^11a,.^4a"] -|=== -|Name|Description|Schema -|**configs** + -__optional__|Per-topic configuration overrides|< string, string > map -|**name** + -__optional__|Name of the topic|string -|**partitions** + -__optional__||< <<_partitionmetadata,PartitionMetadata>> > array -|=== - - -[[_topics]] -=== Topics - -[options="header", cols=".^3a,.^11a,.^4a"] -|=== -|Name|Description|Schema -|**topic_pattern** + -__optional__|A regex topic pattern for matching multiple topics|string -|**topics** + -__optional__||< string > array -|=== - - - diff --git a/documentation/book/api/index.adoc b/documentation/book/api/index.adoc new file mode 100644 index 000000000..8f053f1cf --- /dev/null +++ b/documentation/book/api/index.adoc @@ -0,0 +1,3768 @@ +:toc: left +:numbered: +:toclevels: 4 +:source-highlighter: highlightjs +:keywords: openapi, rest, Kafka Bridge API Reference +:specDir: +:snippetDir: ./documentation/book/api/snippet/ +:generator-template: v1 2019-12-20 +:info-url: https://openapi-generator.tech +:app-name: Kafka Bridge API Reference + += Kafka Bridge API Reference + +== Introduction +The Kafka Bridge provides a REST API for integrating HTTP based client applications with a Kafka cluster. You can use the API to create and manage consumers and send and receive records over HTTP rather than the native Kafka protocol. + + +// markup not found, no include::{specDir}intro.adoc[opts=optional] + + + +== Endpoints + + +[.Consumers] +=== Consumers + + +[.assign] +==== assign + +`POST /consumers/{groupid}/instances/{name}/assignments` + + + +===== Description + +Assigns one or more topic partitions to a consumer. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/assignments/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the consumer belongs. +| X +| null +| + +| name +| Name of the consumer to assign topic partitions to. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Partitions +| List of topic partitions to assign to the consumer. 
<> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Partitions assigned successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found. +| <> + + +| 409 +| Subscriptions to topics, partitions, and patterns are mutually exclusive. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/assignments/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/assignments/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/assignments/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/assignments/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.commit] +==== commit + +`POST /consumers/{groupid}/instances/{name}/offsets` + + + +===== Description + +Commits a list of consumer offsets. To commit offsets for all records fetched by the consumer, leave the request body empty. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/offsets/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the consumer belongs. +| X +| null +| + +| name +| Name of the consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| OffsetCommitSeekList +| List of consumer offsets to commit to the consumer offsets commit log. You can specify one or more topic partitions to commit offsets for. <> +| - +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Commit made successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/offsets/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/offsets/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/offsets/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/offsets/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.createConsumer] +==== createConsumer + +`POST /consumers/{groupid}` + + + +===== Description + +Creates a consumer instance in the given consumer group. You can optionally specify a consumer name and supported configuration options. It returns a base URI which must be used to construct URLs for subsequent requests against this consumer instance. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group in which to create the consumer. 
+| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Consumer +| Name and configuration of the consumer. The name is unique within the scope of the consumer group. If a name is not specified, a randomly generated name is assigned. All parameters are optional. The supported configuration options are shown in the following example. <> +| - +| +| + +|=== + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Consumer created successfully. +| <> + + +| 409 +| A consumer instance with the specified name already exists in the Kafka Bridge. +| <> + + +| 422 +| One or more consumer configuration options have invalid values. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.deleteConsumer] +==== deleteConsumer + +`DELETE /consumers/{groupid}/instances/{name}` + + + +===== Description + +Deletes a specified consumer instance. The request for this operation MUST use the base URL (including the host and port) returned in the response from the `POST` request to `/consumers/{groupid}` that was used to create this consumer. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/DELETE/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the consumer belongs. +| X +| null +| + +| name +| Name of the consumer to delete. +| X +| null +| + +|=== + + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Consumer removed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/DELETE/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/DELETE/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/DELETE/DELETE.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/DELETE/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.listSubscriptions] +==== listSubscriptions + +`GET /consumers/{groupid}/instances/{name}/subscription` + + + +===== Description + +Retrieves a list of the topics to which the consumer is subscribed. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. 
+| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| List of subscribed topics and partitions. +| <> + + +| 404 +| The specified consumer instance was not found. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/subscription/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.poll] +==== poll + +`GET /consumers/{groupid}/instances/{name}/records` + + + +===== Description + +Retrieves records for a subscribed consumer, including message values, topics, and partitions. The request for this operation MUST use the base URL (including the host and port) returned in the response from the `POST` request to `/consumers/{groupid}` that was used to create this consumer. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/records/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer to retrieve records from. +| X +| null +| + +|=== + + + + + +[cols="2,3,1,1,1"] +.Query Parameters +|=== +|Name| Description| Required| Default| Pattern + +| timeout +| The maximum amount of time, in milliseconds, that the HTTP Bridge spends retrieving records before timing out the request. +| - +| null +| + +| max_bytes +| The maximum size, in bytes, of unencoded keys and values that can be included in the response. Otherwise, an error response with code 422 is returned. +| - +| null +| + +|=== + + +===== Return Type + + +array[<>] + + +===== Content Type + +* application/vnd.kafka.json.v2+json +* application/vnd.kafka.binary.v2+json +* application/vnd.kafka.text.v2+json +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Poll request executed successfully. +| List[<>] + + +| 404 +| The specified consumer instance was not found. +| <> + + +| 406 +| The `format` used in the consumer creation request does not match the embedded format in the Accept header of this request or the bridge got a message from the topic which is not JSON encoded. 
+| <> + + +| 422 +| Response exceeds the maximum number of bytes the consumer can receive +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/records/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/records/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/records/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/records/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.seek] +==== seek + +`POST /consumers/{groupid}/instances/{name}/positions` + + + +===== Description + +Configures a subscribed consumer to fetch offsets from a particular offset the next time it fetches a set of records from a given topic partition. This overrides the default fetch behavior for consumers. You can specify one or more topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| OffsetCommitSeekList +| List of partition offsets from which the subscribed consumer will next fetch records. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.seekToBeginning] +==== seekToBeginning + +`POST /consumers/{groupid}/instances/{name}/positions/beginning` + + + +===== Description + +Configures a subscribed consumer to seek (and subsequently read from) the first offset in one or more given topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. 
+| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Partitions +| List of topic partitions to which the consumer is subscribed. The consumer will seek the first offset in the specified partitions. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek to the beginning performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/beginning/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.seekToEnd] +==== seekToEnd + +`POST /consumers/{groupid}/instances/{name}/positions/end` + + + +===== Description + +Configures a subscribed consumer to seek (and subsequently read from) the offset at the end of one or more of the given topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Partitions +| List of topic partitions to which the consumer is subscribed. The consumer will seek the last offset in the specified partitions. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek to the end performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/end/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.subscribe] +==== subscribe + +`POST /consumers/{groupid}/instances/{name}/subscription` + + + +===== Description + +Subscribes a consumer to one or more topics. 
You can describe the topics to which the consumer will subscribe in a list (of `Topics` type) or as a `topic_pattern` field. Each call replaces the subscriptions for the subscriber. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the consumer to subscribe to topics. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Topics +| List of topics to which the consumer will subscribe. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Consumer subscribed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found. +| <> + + +| 409 +| Subscriptions to topics, partitions, and patterns are mutually exclusive. +| <> + + +| 422 +| A list (of `Topics` type) or a `topic_pattern` must be specified. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/subscription/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.unsubscribe] +==== unsubscribe + +`DELETE /consumers/{groupid}/instances/{name}/subscription` + + + +===== Description + +Unsubscribes a consumer from all topics. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/DELETE/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the consumer to unsubscribe from topics. +| X +| null +| + +|=== + + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Consumer unsubscribed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found. 
+| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/DELETE/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/subscription/DELETE/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/subscription/DELETE/DELETE.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/subscription/DELETE/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.Default] +=== Default + + +[.healthy] +==== healthy + +`GET /healthy` + + + +===== Description + +Check if the bridge is running. This does not necessarily imply that it is ready to accept requests. + + +// markup not found, no include::{specDir}healthy/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + + +- + + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| The bridge is healthy +| <<>> + + +| 500 +| The bridge is not healthy +| <<>> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}healthy/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}healthy/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :healthy/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}healthy/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.info] +==== info + +`GET /` + + + +===== Description + +Retrieves information about the Kafka Bridge instance, in JSON format. + + +// markup not found, no include::{specDir}GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Information about Kafka Bridge instance. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}GET/http-request.adoc[opts=optional] + + +include::{snippetDir}GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.metrics] +==== metrics + +`GET /metrics` + + + +===== Description + +Retrieves the bridge metrics in Prometheus format. + + +// markup not found, no include::{specDir}metrics/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + +<> + + +===== Content Type + +* text/plain + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Metrics in Prometheus format retrieved successfully. 
+| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}metrics/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}metrics/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :metrics/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}metrics/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.openapi] +==== openapi + +`GET /openapi` + + + +===== Description + +Retrieves the OpenAPI v2 specification in JSON format. + + +// markup not found, no include::{specDir}openapi/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + +<> + + +===== Content Type + +* application/json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| OpenAPI v2 specification in JSON format retrieved successfully. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}openapi/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}openapi/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :openapi/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}openapi/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.openapiv2] +==== openapiv2 + +`GET /openapi/v2` + + + +===== Description + +Retrieves the OpenAPI v2 specification in JSON format. + + +// markup not found, no include::{specDir}openapi/v2/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + +<> + + +===== Content Type + +* application/json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| OpenAPI v2 specification in JSON format retrieved successfully. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}openapi/v2/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}openapi/v2/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :openapi/v2/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}openapi/v2/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.openapiv3] +==== openapiv3 + +`GET /openapi/v3` + + + +===== Description + +Retrieves the OpenAPI v3 specification in JSON format. + + +// markup not found, no include::{specDir}openapi/v3/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + +<> + + +===== Content Type + +* application/json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| OpenAPI v3 specification in JSON format retrieved successfully. 
+| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}openapi/v3/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}openapi/v3/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :openapi/v3/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}openapi/v3/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.ready] +==== ready + +`GET /ready` + + + +===== Description + +Check if the bridge is ready and can accept requests. + + +// markup not found, no include::{specDir}ready/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + + +- + + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| The bridge is ready +| <<>> + + +| 500 +| The bridge is not ready +| <<>> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}ready/GET/http-request.adoc[opts=optional] + + +// markup not found, no include::{snippetDir}ready/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :ready/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}ready/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.Producer] +=== Producer + + +[.send] +==== send + +`POST /topics/{topicname}` + + + +===== Description + +Sends one or more records to a given topic, optionally specifying a partition, key, or both. + + +// markup not found, no include::{specDir}topics/\{topicname\}/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| ProducerRecordList +| <> +| X +| +| + +|=== + + + + +[cols="2,3,1,1,1"] +.Query Parameters +|=== +|Name| Description| Required| Default| Pattern + +| async +| Ignore metadata as result of the sending operation, not returning them to the client. If not specified it is false, metadata returned. +| - +| null +| + +|=== + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Records sent successfully. +| <> + + +| 404 +| The specified topic was not found. +| <> + + +| 422 +| The record list is not valid. +| <> + +|=== + +===== Samples + + +include::{snippetDir}topics/\{topicname\}/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.sendToPartition] +==== sendToPartition + +`POST /topics/{topicname}/partitions/{partitionid}` + + + +===== Description + +Sends one or more records to a given topic partition, optionally specifying a key. 
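As a rough illustration of this operation (not part of the generated reference): assuming a bridge listening on `localhost:8080`, a topic named `my-topic`, and records sent with the json embedded format, a request might look like the sketch below. The host, topic name, partition, and record values are placeholders, and the actual per-build samples come from the snippet includes under Samples.

[source,bash]
----
# Send one record to partition 0 of "my-topic" (host, topic, and values are placeholders).
# The json embedded format is assumed, so the request uses the matching content type.
curl -X POST http://localhost:8080/topics/my-topic/partitions/0 \
  -H 'Content-Type: application/vnd.kafka.json.v2+json' \
  -d '{
        "records": [
          { "key": "my-key", "value": "my-value" }
        ]
      }'

# A 200 response carries an OffsetRecordSentList, for example:
# {"offsets":[{"partition":0,"offset":5}]}
----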
+ + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +| partitionid +| ID of the partition to send records to or retrieve metadata from. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| ProducerRecordToPartitionList +| List of records to send to a given topic partition, including a value (required) and a key (optional). <> +| X +| +| + +|=== + + + + +[cols="2,3,1,1,1"] +.Query Parameters +|=== +|Name| Description| Required| Default| Pattern + +| async +| Whether to return immediately upon sending records, instead of waiting for metadata. No offsets will be returned if specified. Defaults to false. +| - +| null +| + +|=== + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Records sent successfully. +| <> + + +| 404 +| The specified topic partition was not found. +| <> + + +| 422 +| The record is not valid. +| <> + +|=== + +===== Samples + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/partitions/{partitionid}/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.Seek] +=== Seek + + +[.seek] +==== seek + +`POST /consumers/{groupid}/instances/{name}/positions` + + + +===== Description + +Configures a subscribed consumer to fetch offsets from a particular offset the next time it fetches a set of records from a given topic partition. This overrides the default fetch behavior for consumers. You can specify one or more topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| OffsetCommitSeekList +| List of partition offsets from which the subscribed consumer will next fetch records. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. 
+| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.seekToBeginning] +==== seekToBeginning + +`POST /consumers/{groupid}/instances/{name}/positions/beginning` + + + +===== Description + +Configures a subscribed consumer to seek (and subsequently read from) the first offset in one or more given topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Partitions +| List of topic partitions to which the consumer is subscribed. The consumer will seek the first offset in the specified partitions. <> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek to the beginning performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/beginning/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/beginning/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.seekToEnd] +==== seekToEnd + +`POST /consumers/{groupid}/instances/{name}/positions/end` + + + +===== Description + +Configures a subscribed consumer to seek (and subsequently read from) the offset at the end of one or more of the given topic partitions. + + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| groupid +| ID of the consumer group to which the subscribed consumer belongs. +| X +| null +| + +| name +| Name of the subscribed consumer. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| Partitions +| List of topic partitions to which the consumer is subscribed. The consumer will seek the last offset in the specified partitions. 
<> +| X +| +| + +|=== + + + + + +===== Return Type + + + + +- + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 204 +| Seek to the end performed successfully. +| <<>> + + +| 404 +| The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned. +| <> + +|=== + +===== Samples + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :consumers/{groupid}/instances/{name}/positions/end/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}consumers/\{groupid\}/instances/\{name\}/positions/end/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.Topics] +=== Topics + + +[.getOffsets] +==== getOffsets + +`GET /topics/{topicname}/partitions/{partitionid}/offsets` + + + +===== Description + +Retrieves a summary of the offsets for the topic partition. + + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/offsets/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic containing the partition. +| X +| null +| + +| partitionid +| ID of the partition. +| X +| null +| + +|=== + + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| A summary of the offsets of the topic partition. +| <> + + +| 404 +| The specified topic partition was not found. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/offsets/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/offsets/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/partitions/{partitionid}/offsets/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/offsets/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.getPartition] +==== getPartition + +`GET /topics/{topicname}/partitions/{partitionid}` + + + +===== Description + +Retrieves partition metadata for the topic partition. + + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +| partitionid +| ID of the partition to send records to or retrieve metadata from. +| X +| null +| + +|=== + + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Partition metadata +| <> + + +| 404 +| The specified partition was not found. 
+| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/partitions/{partitionid}/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.getTopic] +==== getTopic + +`GET /topics/{topicname}` + + + +===== Description + +Retrieves the metadata about a given topic. + + +// markup not found, no include::{specDir}topics/\{topicname\}/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +|=== + + + + + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Topic metadata +| <> + + +| 404 +| The specified topic was not found. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}topics/\{topicname\}/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.listPartitions] +==== listPartitions + +`GET /topics/{topicname}/partitions` + + + +===== Description + +Retrieves a list of partitions for the topic. + + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/GET/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +|=== + + + + + + +===== Return Type + + +array[<>] + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| List of partitions. +| List[<>] + + +| 404 +| The specified topic was not found. +| <> + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}topics/\{topicname\}/partitions/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/partitions/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/partitions/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.listTopics] +==== listTopics + +`GET /topics` + + + +===== Description + +Retrieves a list of all topics. 
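As an illustration only (the host and the topic names in the sample response are placeholders, not part of the generated reference), listing topics through a bridge on `localhost:8080` might look like this:

[source,bash]
----
# List the topics known to the Kafka cluster behind the bridge.
curl -X GET http://localhost:8080/topics \
  -H 'Accept: application/vnd.kafka.v2+json'

# The 200 response is a JSON array of topic names, for example:
# ["my-topic","other-topic"]
----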
+ + +// markup not found, no include::{specDir}topics/GET/spec.adoc[opts=optional] + + + +===== Parameters + + + + + + + +===== Return Type + + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| List of topics. +| List[<>] + +|=== + +===== Samples + + +// markup not found, no include::{snippetDir}topics/GET/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/GET/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/GET/GET.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/GET/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.send] +==== send + +`POST /topics/{topicname}` + + + +===== Description + +Sends one or more records to a given topic, optionally specifying a partition, key, or both. + + +// markup not found, no include::{specDir}topics/\{topicname\}/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| ProducerRecordList +| <> +| X +| +| + +|=== + + + + +[cols="2,3,1,1,1"] +.Query Parameters +|=== +|Name| Description| Required| Default| Pattern + +| async +| Ignore metadata as result of the sending operation, not returning them to the client. If not specified it is false, metadata returned. +| - +| null +| + +|=== + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Records sent successfully. +| <> + + +| 404 +| The specified topic was not found. +| <> + + +| 422 +| The record list is not valid. +| <> + +|=== + +===== Samples + + +include::{snippetDir}topics/\{topicname\}/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[.sendToPartition] +==== sendToPartition + +`POST /topics/{topicname}/partitions/{partitionid}` + + + +===== Description + +Sends one or more records to a given topic partition, optionally specifying a key. + + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/spec.adoc[opts=optional] + + + +===== Parameters + + +[cols="2,3,1,1,1"] +.Path Parameters +|=== +|Name| Description| Required| Default| Pattern + +| topicname +| Name of the topic to send records to or retrieve metadata from. +| X +| null +| + +| partitionid +| ID of the partition to send records to or retrieve metadata from. +| X +| null +| + +|=== + + +[cols="2,3,1,1,1"] +.Body Parameter +|=== +|Name| Description| Required| Default| Pattern + +| ProducerRecordToPartitionList +| List of records to send to a given topic partition, including a value (required) and a key (optional). 
<> +| X +| +| + +|=== + + + + +[cols="2,3,1,1,1"] +.Query Parameters +|=== +|Name| Description| Required| Default| Pattern + +| async +| Whether to return immediately upon sending records, instead of waiting for metadata. No offsets will be returned if specified. Defaults to false. +| - +| null +| + +|=== + + +===== Return Type + + +<> + + +===== Content Type + +* application/vnd.kafka.v2+json + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + + +| 200 +| Records sent successfully. +| <> + + +| 404 +| The specified topic partition was not found. +| <> + + +| 422 +| The record is not valid. +| <> + +|=== + +===== Samples + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/http-request.adoc[opts=optional] + + +include::{snippetDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/http-response.adoc[opts=optional] + + + +// file not found, no * wiremock data link :topics/{topicname}/partitions/{partitionid}/POST/POST.json[] + + +ifdef::internal-generation[] +===== Implementation + +// markup not found, no include::{specDir}topics/\{topicname\}/partitions/\{partitionid\}/POST/implementation.adoc[opts=optional] + + +endif::internal-generation[] + + +[#models] +== Models + + +[#BridgeInfo] +=== _BridgeInfo_ BridgeInfo + +Information about Kafka Bridge instance. + + +[.fields-BridgeInfo] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| bridge_version +| +| +| String +| +| + +|=== + + + +[#Consumer] +=== _Consumer_ Consumer + + + + +[.fields-Consumer] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| name +| +| +| String +| The unique name for the consumer instance. The name is unique within the scope of the consumer group. The name is used in URLs. If a name is not specified, a randomly generated name is assigned. +| + +| format +| +| +| String +| The allowable message format for the consumer, which can be `binary` (default) or `json`. The messages are converted into a JSON format. +| + +| auto.offset.reset +| +| +| String +| Resets the offset position for the consumer. If set to `latest` (default), messages are read from the latest offset. If set to `earliest`, messages are read from the first offset. +| + +| fetch.min.bytes +| +| +| Integer +| Sets the minimum amount of data, in bytes, for the consumer to receive. The broker waits until the data to send exceeds this amount. Default is `1` byte. +| + +| consumer.request.timeout.ms +| +| +| Integer +| Sets the maximum amount of time, in milliseconds, for the consumer to wait for messages for a request. If the timeout period is reached without a response, an error is returned. Default is `30000` (30 seconds). +| + +| enable.auto.commit +| +| +| Boolean +| If set to `true` (default), message offsets are committed automatically for the consumer. If set to `false`, message offsets must be committed manually. +| + +| isolation.level +| +| +| String +| If set to `read_uncommitted` (default), all transaction records are retrieved, indpendent of any transaction outcome. If set to `read_committed`, the records from committed transactions are retrieved. 
+| + +|=== + + + +[#ConsumerRecord] +=== _ConsumerRecord_ ConsumerRecord + + + + +[.fields-ConsumerRecord] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| key +| +| +| <> +| +| + +| offset +| +| +| Long +| +| int64 + +| partition +| +| +| Integer +| +| int32 + +| topic +| +| +| String +| +| + +| value +| +| X +| <> +| +| + +| headers +| +| +| List of <> +| +| + +| timestamp +| +| +| Long +| +| int64 + +|=== + + + +[#CreatedConsumer] +=== _CreatedConsumer_ CreatedConsumer + + + + +[.fields-CreatedConsumer] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| instance_id +| +| +| String +| Unique ID for the consumer instance in the group. +| + +| base_uri +| +| +| String +| Base URI used to construct URIs for subsequent requests against this consumer instance. +| + +|=== + + + +[#Error] +=== _Error_ Error + + + + +[.fields-Error] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| error_code +| +| +| Integer +| +| int32 + +| message +| +| +| String +| +| + +|=== + + + +[#KafkaHeader] +=== _KafkaHeader_ KafkaHeader + + + + +[.fields-KafkaHeader] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| key +| X +| +| String +| +| + +| value +| X +| +| byte[] +| The header value in binary format, base64-encoded +| byte + +|=== + + + +[#OffsetCommitSeek] +=== _OffsetCommitSeek_ OffsetCommitSeek + + + + +[.fields-OffsetCommitSeek] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| X +| +| Integer +| +| int32 + +| offset +| X +| +| Long +| +| int64 + +| topic +| X +| +| String +| +| + +|=== + + + +[#OffsetCommitSeekList] +=== _OffsetCommitSeekList_ OffsetCommitSeekList + + + + +[.fields-OffsetCommitSeekList] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| offsets +| +| +| List of <> +| +| + +|=== + + + +[#OffsetRecordSent] +=== _OffsetRecordSent_ OffsetRecordSent + + + + +[.fields-OffsetRecordSent] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| +| +| Integer +| +| int32 + +| offset +| +| +| Long +| +| int64 + +|=== + + + +[#OffsetRecordSentList] +=== _OffsetRecordSentList_ OffsetRecordSentList + + + + +[.fields-OffsetRecordSentList] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| offsets +| +| +| List of <> +| +| + +|=== + + + +[#OffsetRecordSentListOffsetsInner] +=== _OffsetRecordSentListOffsetsInner_ + + + + +[.fields-OffsetRecordSentListOffsetsInner] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| +| +| Integer +| +| int32 + +| offset +| +| +| Long +| +| int64 + +| error_code +| +| +| Integer +| +| int32 + +| message +| +| +| String +| +| + +|=== + + + +[#OffsetsSummary] +=== _OffsetsSummary_ OffsetsSummary + + + + +[.fields-OffsetsSummary] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| beginning_offset +| +| +| Long +| +| int64 + +| end_offset +| +| +| Long +| +| int64 + +|=== + + + +[#Partition] +=== _Partition_ Partition + + + + +[.fields-Partition] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| +| +| Integer +| +| int32 + +| topic +| +| +| String +| +| + +|=== + + + +[#PartitionMetadata] +=== _PartitionMetadata_ PartitionMetadata + + + + 
+[.fields-PartitionMetadata] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| +| +| Integer +| +| int32 + +| leader +| +| +| Integer +| +| int32 + +| replicas +| +| +| List of <> +| +| + +|=== + + + +[#Partitions] +=== _Partitions_ Partitions + + + + +[.fields-Partitions] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partitions +| +| +| List of <> +| +| + +|=== + + + +[#ProducerRecord] +=== _ProducerRecord_ ProducerRecord + + + + +[.fields-ProducerRecord] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| partition +| +| +| Integer +| +| int32 + +| timestamp +| +| +| Long +| +| int64 + +| value +| X +| X +| <> +| +| + +| key +| +| +| <> +| +| + +| headers +| +| +| List of <> +| +| + +|=== + + + +[#ProducerRecordList] +=== _ProducerRecordList_ ProducerRecordList + + + + +[.fields-ProducerRecordList] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| records +| +| +| List of <> +| +| + +|=== + + + +[#ProducerRecordToPartition] +=== _ProducerRecordToPartition_ ProducerRecordToPartition + + + + +[.fields-ProducerRecordToPartition] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| value +| X +| X +| <> +| +| + +| key +| +| +| <> +| +| + +| headers +| +| +| List of <> +| +| + +|=== + + + +[#ProducerRecordToPartitionList] +=== _ProducerRecordToPartitionList_ ProducerRecordToPartitionList + + + + +[.fields-ProducerRecordToPartitionList] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| records +| +| +| List of <> +| +| + +|=== + + + +[#RecordKey] +=== _RecordKey_ RecordKey + +Key representation for a record. It can be an array, a JSON object or a string + + +[.fields-RecordKey] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +|=== + + + +[#RecordValue] +=== _RecordValue_ RecordValue + +Value representation for a record. 
It can be an array, a JSON object or a string + + +[.fields-RecordValue] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +|=== + + + +[#Replica] +=== _Replica_ Replica + + + + +[.fields-Replica] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| broker +| +| +| Integer +| +| int32 + +| leader +| +| +| Boolean +| +| + +| in_sync +| +| +| Boolean +| +| + +|=== + + + +[#SubscribedTopicList] +=== _SubscribedTopicList_ SubscribedTopicList + + + + +[.fields-SubscribedTopicList] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| topics +| +| +| <> +| +| + +| partitions +| +| +| List of <> +| +| int32 + +|=== + + + +[#TopicMetadata] +=== _TopicMetadata_ TopicMetadata + + + + +[.fields-TopicMetadata] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| name +| +| +| String +| Name of the topic +| + +| configs +| +| +| Map of <> +| Per-topic configuration overrides +| + +| partitions +| +| +| List of <> +| +| + +|=== + + + +[#Topics] +=== _Topics_ Topics + + + + +[.fields-Topics] +[cols="2,1,1,2,4,1"] +|=== +| Field Name| Required| Nullable | Type| Description | Format + +| topics +| +| +| List of <> +| +| + +| topic_pattern +| +| +| String +| A regex topic pattern for matching multiple topics +| + +|=== + + diff --git a/documentation/book/api/overview.adoc b/documentation/book/api/overview.adoc deleted file mode 100644 index 5a77af727..000000000 --- a/documentation/book/api/overview.adoc +++ /dev/null @@ -1,32 +0,0 @@ -= Kafka Bridge API Reference - - -[[_overview]] -== Overview -The Kafka Bridge provides a REST API for integrating HTTP based client applications with a Kafka cluster. You can use the API to create and manage consumers and send and receive records over HTTP rather than the native Kafka protocol. - - -=== Version information -[%hardbreaks] -__Version__ : 0.1.0 - - -=== Tags - -* Consumers : Consumer operations to create consumers in your Kafka cluster and perform common actions, such as subscribing to topics, retrieving processed records, and committing offsets. -* Producer : Producer operations to send records to a specified topic or topic partition. -* Seek : Seek operations that enable a consumer to begin receiving messages from a given offset position. -* Topics : Topic operations to send messages to a specified topic or topic partition, optionally including message keys in requests. You can also retrieve topics and topic metadata. - - -=== Consumes - -* `application/json` - - -=== Produces - -* `application/json` - - - diff --git a/documentation/book/api/paths.adoc b/documentation/book/api/paths.adoc deleted file mode 100644 index aab118025..000000000 --- a/documentation/book/api/paths.adoc +++ /dev/null @@ -1,1560 +0,0 @@ - -[[_paths]] -== Paths - -[[_info]] -=== GET / - -==== Description -Retrieves information about the Kafka Bridge instance, in JSON format. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Information about Kafka Bridge instance.|<<_bridgeinfo,BridgeInfo>> -|=== - - -==== Produces - -* `application/json` - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "bridge_version" : "0.16.0" -} ----- - - -[[_createconsumer]] -=== POST /consumers/{groupid} - -==== Description -Creates a consumer instance in the given consumer group. 
You can optionally specify a consumer name and supported configuration options. It returns a base URI which must be used to construct URLs for subsequent requests against this consumer instance. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group in which to create the consumer.|string -|**Body**|**body** + -__required__|Name and configuration of the consumer. The name is unique within the scope of the consumer group. If a name is not specified, a randomly generated name is assigned. All parameters are optional. The supported configuration options are shown in the following example.|<<_consumer,Consumer>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Consumer created successfully.|<<_createdconsumer,CreatedConsumer>> -|**409**|A consumer instance with the specified name already exists in the Kafka Bridge.|<<_error,Error>> -|**422**|One or more consumer configuration options have invalid values.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "name" : "consumer1", - "format" : "binary", - "auto.offset.reset" : "earliest", - "enable.auto.commit" : false, - "fetch.min.bytes" : 512, - "consumer.request.timeout.ms" : 30000, - "isolation.level" : "read_committed" -} ----- - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "instance_id" : "consumer1", - "base_uri" : "http://localhost:8080/consumers/my-group/instances/consumer1" -} ----- - - -===== Response 409 -[source,json] ----- -{ - "error_code" : 409, - "message" : "A consumer instance with the specified name already exists in the Kafka Bridge." -} ----- - - -===== Response 422 -[source,json] ----- -{ - "error_code" : 422, - "message" : "One or more consumer configuration options have invalid values." -} ----- - - -[[_deleteconsumer]] -=== DELETE /consumers/{groupid}/instances/{name} - -==== Description -Deletes a specified consumer instance. The request for this operation MUST use the base URL (including the host and port) returned in the response from the `POST` request to `/consumers/{groupid}` that was used to create this consumer. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the consumer belongs.|string -|**Path**|**name** + -__required__|Name of the consumer to delete.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Consumer removed successfully.|No Content -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_assign]] -=== POST /consumers/{groupid}/instances/{name}/assignments - -==== Description -Assigns one or more topic partitions to a consumer. 
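As an illustrative sketch only (the host, group ID, consumer name, and topic are placeholders), the assignment request can be issued from Python with the `requests` library:

[source,python]
----
# Illustrative sketch: assign two partitions of a topic to an existing consumer.
import requests

body = {
    "partitions": [
        {"topic": "topic", "partition": 0},
        {"topic": "topic", "partition": 1},
    ]
}
response = requests.post(
    "http://localhost:8080/consumers/my-group/instances/consumer1/assignments",
    headers={"Content-Type": "application/vnd.kafka.v2+json"},
    json=body,
)
response.raise_for_status()  # 204 No Content when the assignment succeeds
----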
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the consumer belongs.|string -|**Path**|**name** + -__required__|Name of the consumer to assign topic partitions to.|string -|**Body**|**body** + -__required__|List of topic partitions to assign to the consumer.|<<_partitions,Partitions>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Partitions assigned successfully.|No Content -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|**409**|Subscriptions to topics, partitions, and patterns are mutually exclusive.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "partitions" : [ { - "topic" : "topic", - "partition" : 0 - }, { - "topic" : "topic", - "partition" : 1 - } ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -===== Response 409 -[source,json] ----- -{ - "error_code" : 409, - "message" : "Subscriptions to topics, partitions, and patterns are mutually exclusive." -} ----- - - -[[_commit]] -=== POST /consumers/{groupid}/instances/{name}/offsets - -==== Description -Commits a list of consumer offsets. To commit offsets for all records fetched by the consumer, leave the request body empty. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the consumer belongs.|string -|**Path**|**name** + -__required__|Name of the consumer.|string -|**Body**|**body** + -__optional__|List of consumer offsets to commit to the consumer offsets commit log. You can specify one or more topic partitions to commit offsets for.|<<_offsetcommitseeklist,OffsetCommitSeekList>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Commit made successfully.|No Content -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "offsets" : [ { - "topic" : "topic", - "partition" : 0, - "offset" : 15 - }, { - "topic" : "topic", - "partition" : 1, - "offset" : 42 - } ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_seek]] -=== POST /consumers/{groupid}/instances/{name}/positions - -==== Description -Configures a subscribed consumer to fetch offsets from a particular offset the next time it fetches a set of records from a given topic partition. This overrides the default fetch behavior for consumers. You can specify one or more topic partitions. 
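For illustration (placeholder host, group, consumer, and offsets), a seek request can be sent like this with the Python `requests` library:

[source,python]
----
# Illustrative sketch: move a subscribed consumer to a specific offset
# before its next fetch from the given topic partition.
import requests

body = {"offsets": [{"topic": "topic", "partition": 0, "offset": 15}]}
response = requests.post(
    "http://localhost:8080/consumers/my-group/instances/consumer1/positions",
    headers={"Content-Type": "application/vnd.kafka.v2+json"},
    json=body,
)
response.raise_for_status()  # 204 No Content when the seek is accepted
----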
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the consumer belongs.|string -|**Path**|**name** + -__required__|Name of the subscribed consumer.|string -|**Body**|**body** + -__required__|List of partition offsets from which the subscribed consumer will next fetch records.|<<_offsetcommitseeklist,OffsetCommitSeekList>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Seek performed successfully.|No Content -|**404**|The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers -* Seek - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "offsets" : [ { - "topic" : "topic", - "partition" : 0, - "offset" : 15 - }, { - "topic" : "topic", - "partition" : 1, - "offset" : 42 - } ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_seektobeginning]] -=== POST /consumers/{groupid}/instances/{name}/positions/beginning - -==== Description -Configures a subscribed consumer to seek (and subsequently read from) the first offset in one or more given topic partitions. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the subscribed consumer.|string -|**Body**|**body** + -__required__|List of topic partitions to which the consumer is subscribed. The consumer will seek the first offset in the specified partitions.|<<_partitions,Partitions>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Seek to the beginning performed successfully.|No Content -|**404**|The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers -* Seek - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "partitions" : [ { - "topic" : "topic", - "partition" : 0 - }, { - "topic" : "topic", - "partition" : 1 - } ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_seektoend]] -=== POST /consumers/{groupid}/instances/{name}/positions/end - -==== Description -Configures a subscribed consumer to seek (and subsequently read from) the offset at the end of one or more of the given topic partitions. 
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the subscribed consumer.|string -|**Body**|**body** + -__optional__|List of topic partitions to which the consumer is subscribed. The consumer will seek the last offset in the specified partitions.|<<_partitions,Partitions>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Seek to the end performed successfully.|No Content -|**404**|The specified consumer instance was not found, or the specified consumer instance did not have one of the specified partitions assigned.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers -* Seek - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "partitions" : [ { - "topic" : "topic", - "partition" : 0 - }, { - "topic" : "topic", - "partition" : 1 - } ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_poll]] -=== GET /consumers/{groupid}/instances/{name}/records - -==== Description -Retrieves records for a subscribed consumer, including message values, topics, and partitions. The request for this operation MUST use the base URL (including the host and port) returned in the response from the `POST` request to `/consumers/{groupid}` that was used to create this consumer. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the subscribed consumer to retrieve records from.|string -|**Query**|**max_bytes** + -__optional__|The maximum size, in bytes, of unencoded keys and values that can be included in the response. 
Otherwise, an error response with code 422 is returned.|integer -|**Query**|**timeout** + -__optional__|The maximum amount of time, in milliseconds, that the HTTP Bridge spends retrieving records before timing out the request.|integer -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Poll request executed successfully.|<<_consumerrecordlist,ConsumerRecordList>> -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|**406**|The `format` used in the consumer creation request does not match the embedded format in the Accept header of this request or the bridge got a message from the topic which is not JSON encoded.|<<_error,Error>> -|**422**|Response exceeds the maximum number of bytes the consumer can receive|<<_error,Error>> -|=== - - -==== Produces - -* `application/vnd.kafka.json.v2+json` -* `application/vnd.kafka.binary.v2+json` -* `application/vnd.kafka.text.v2+json` -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -[ { - "topic" : "topic", - "key" : "key1", - "value" : { - "foo" : "bar" - }, - "partition" : 0, - "offset" : 2 -}, { - "topic" : "topic", - "key" : "key2", - "value" : [ "foo2", "bar2" ], - "partition" : 1, - "offset" : 3 -} ] ----- - -[source,json] ----- -[ - { - "topic": "test", - "key": "a2V5", - "value": "Y29uZmx1ZW50", - "partition": 1, - "offset": 100, - }, - { - "topic": "test", - "key": "a2V5", - "value": "a2Fma2E=", - "partition": 2, - "offset": 101, - } -] ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -===== Response 406 -[source,json] ----- -{ - "error_code" : 406, - "message" : "The `format` used in the consumer creation request does not match the embedded format in the Accept header of this request." -} ----- - - -===== Response 422 -[source,json] ----- -{ - "error_code" : 422, - "message" : "Response exceeds the maximum number of bytes the consumer can receive" -} ----- - - -[[_subscribe]] -=== POST /consumers/{groupid}/instances/{name}/subscription - -==== Description -Subscribes a consumer to one or more topics. You can describe the topics to which the consumer will subscribe in a list (of `Topics` type) or as a `topic_pattern` field. Each call replaces the subscriptions for the subscriber. 
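For illustration (placeholder host, group, consumer, and topic names), a subscription request can be sent from Python with the `requests` library:

[source,python]
----
# Illustrative sketch: subscribe a consumer to two topics.
# Each call replaces any previous subscription for this consumer.
import requests

response = requests.post(
    "http://localhost:8080/consumers/my-group/instances/consumer1/subscription",
    headers={"Content-Type": "application/vnd.kafka.v2+json"},
    json={"topics": ["topic1", "topic2"]},
)
response.raise_for_status()  # 204 No Content on success
----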
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the consumer to subscribe to topics.|string -|**Body**|**body** + -__required__|List of topics to which the consumer will subscribe.|<<_topics,Topics>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Consumer subscribed successfully.|No Content -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|**409**|Subscriptions to topics, partitions, and patterns are mutually exclusive.|<<_error,Error>> -|**422**|A list (of `Topics` type) or a `topic_pattern` must be specified.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "topics" : [ "topic1", "topic2" ] -} ----- - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -===== Response 409 -[source,json] ----- -{ - "error_code" : 409, - "message" : "Subscriptions to topics, partitions, and patterns are mutually exclusive." -} ----- - - -===== Response 422 -[source,json] ----- -{ - "error_code" : 422, - "message" : "A list (of Topics type) or a topic_pattern must be specified." -} ----- - - -[[_listsubscriptions]] -=== GET /consumers/{groupid}/instances/{name}/subscription - -==== Description -Retrieves a list of the topics to which the consumer is subscribed. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the subscribed consumer.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|List of subscribed topics and partitions.|<<_subscribedtopiclist,SubscribedTopicList>> -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Consumers - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "topics" : [ "my-topic1", "my-topic2" ], - "partitions" : [ { - "my-topic1" : [ 1, 2, 3 ] - }, { - "my-topic2" : [ 1 ] - } ] -} ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_unsubscribe]] -=== DELETE /consumers/{groupid}/instances/{name}/subscription - -==== Description -Unsubscribes a consumer from all topics. 
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**groupid** + -__required__|ID of the consumer group to which the subscribed consumer belongs.|string -|**Path**|**name** + -__required__|Name of the consumer to unsubscribe from topics.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|Consumer unsubscribed successfully.|No Content -|**404**|The specified consumer instance was not found.|<<_error,Error>> -|=== - - -==== Tags - -* Consumers - - -==== Example HTTP response - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified consumer instance was not found." -} ----- - - -[[_healthy]] -=== GET /healthy - -==== Description -Check if the bridge is running. This does not necessarily imply that it is ready to accept requests. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|The bridge is healthy|No Content -|**500**|The bridge is not healthy|No Content -|=== - - -[[_metrics]] -=== GET /metrics - -==== Description -Retrieves the bridge metrics in Prometheus format. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Metrics in Prometheus format retrieved successfully.|string -|=== - - -==== Produces - -* `text/plain` - - -[[_openapi]] -=== GET /openapi - -==== Description -Retrieves the OpenAPI v2 specification in JSON format. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|OpenAPI v2 specification in JSON format retrieved successfully.|string -|=== - - -==== Produces - -* `application/json` - - -[[_openapiv2]] -=== GET /openapi/v2 - -==== Description -Retrieves the OpenAPI v2 specification in JSON format. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|OpenAPI v2 specification in JSON format retrieved successfully.|string -|=== - - -==== Produces - -* `application/json` - - -[[_openapiv3]] -=== GET /openapi/v3 - -==== Description -Retrieves the OpenAPI v3 specification in JSON format. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|OpenAPI v3 specification in JSON format retrieved successfully.|string -|=== - - -==== Produces - -* `application/json` - - -[[_ready]] -=== GET /ready - -==== Description -Check if the bridge is ready and can accept requests. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**204**|The bridge is ready|No Content -|**500**|The bridge is not ready|No Content -|=== - - -[[_listtopics]] -=== GET /topics - -==== Description -Retrieves a list of all topics. - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|List of topics.|< string > array -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Topics - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -[ "topic1", "topic2" ] ----- - - -[[_send]] -=== POST /topics/{topicname} - -==== Description -Sends one or more records to a given topic, optionally specifying a partition, key, or both. 
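For illustration (placeholder host and topic name), JSON-embedded records can be produced from Python with the `requests` library; the `Content-Type` must match one of the embedded formats listed under Consumes:

[source,python]
----
# Illustrative sketch: send three records with JSON-embedded values to a topic.
import requests

body = {
    "records": [
        {"key": "key1", "value": "value1"},
        {"value": "value2", "partition": 1},
        {"value": "value3"},
    ]
}
response = requests.post(
    "http://localhost:8080/topics/my-topic",
    headers={"Content-Type": "application/vnd.kafka.json.v2+json"},
    json=body,
)
response.raise_for_status()
print(response.json())  # partition and offset for each record sent
----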
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**topicname** + -__required__|Name of the topic to send records to or retrieve metadata from.|string -|**Query**|**async** + -__optional__|Whether to return immediately upon sending records, instead of waiting for metadata. No offsets will be returned if specified. Defaults to false.|boolean -|**Body**|**body** + -__required__||<<_producerrecordlist,ProducerRecordList>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Records sent successfully.|<<_offsetrecordsentlist,OffsetRecordSentList>> -|**404**|The specified topic was not found.|<<_error,Error>> -|**422**|The record list is not valid.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.json.v2+json` -* `application/vnd.kafka.binary.v2+json` -* `application/vnd.kafka.text.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Producer -* Topics - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "records" : [ { - "key" : "key1", - "value" : "value1" - }, { - "value" : "value2", - "partition" : 1 - }, { - "value" : "value3" - } ] -} ----- - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "offsets" : [ { - "partition" : 2, - "offset" : 0 - }, { - "partition" : 1, - "offset" : 1 - }, { - "partition" : 2, - "offset" : 2 - } ] -} ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified topic was not found." -} ----- - - -===== Response 422 -[source,json] ----- -{ - "error_code" : 422, - "message" : "The record list contains invalid records." -} ----- - - -[[_gettopic]] -=== GET /topics/{topicname} - -==== Description -Retrieves the metadata about a given topic. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**topicname** + -__required__|Name of the topic to send records to or retrieve metadata from.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Topic metadata|<<_topicmetadata,TopicMetadata>> -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Topics - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "name" : "topic", - "offset" : 2, - "configs" : { - "cleanup.policy" : "compact" - }, - "partitions" : [ { - "partition" : 1, - "leader" : 1, - "replicas" : [ { - "broker" : 1, - "leader" : true, - "in_sync" : true - }, { - "broker" : 2, - "leader" : false, - "in_sync" : true - } ] - }, { - "partition" : 2, - "leader" : 2, - "replicas" : [ { - "broker" : 1, - "leader" : false, - "in_sync" : true - }, { - "broker" : 2, - "leader" : true, - "in_sync" : true - } ] - } ] -} ----- - - -[[_listpartitions]] -=== GET /topics/{topicname}/partitions - -==== Description -Retrieves a list of partitions for the topic. 
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**topicname** + -__required__|Name of the topic to send records to or retrieve metadata from.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|List of partitions|< <<_partitionmetadata,PartitionMetadata>> > array -|**404**|The specified topic was not found.|<<_error,Error>> -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Topics - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -[ { - "partition" : 1, - "leader" : 1, - "replicas" : [ { - "broker" : 1, - "leader" : true, - "in_sync" : true - }, { - "broker" : 2, - "leader" : false, - "in_sync" : true - } ] -}, { - "partition" : 2, - "leader" : 2, - "replicas" : [ { - "broker" : 1, - "leader" : false, - "in_sync" : true - }, { - "broker" : 2, - "leader" : true, - "in_sync" : true - } ] -} ] ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified topic was not found." -} ----- - - -[[_sendtopartition]] -=== POST /topics/{topicname}/partitions/{partitionid} - -==== Description -Sends one or more records to a given topic partition, optionally specifying a key. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**partitionid** + -__required__|ID of the partition to send records to or retrieve metadata from.|integer -|**Path**|**topicname** + -__required__|Name of the topic to send records to or retrieve metadata from.|string -|**Query**|**async** + -__optional__|Whether to return immediately upon sending records, instead of waiting for metadata. No offsets will be returned if specified. Defaults to false.|boolean -|**Body**|**body** + -__required__|List of records to send to a given topic partition, including a value (required) and a key (optional).|<<_producerrecordtopartitionlist,ProducerRecordToPartitionList>> -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Records sent successfully.|<<_offsetrecordsentlist,OffsetRecordSentList>> -|**404**|The specified topic partition was not found.|<<_error,Error>> -|**422**|The record is not valid.|<<_error,Error>> -|=== - - -==== Consumes - -* `application/vnd.kafka.json.v2+json` -* `application/vnd.kafka.binary.v2+json` -* `application/vnd.kafka.text.v2+json` - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Producer -* Topics - - -==== Example HTTP request - -===== Request body -[source,json] ----- -{ - "records" : [ { - "key" : "key1", - "value" : "value1" - }, { - "value" : "value2" - } ] -} ----- - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "offsets" : [ { - "partition" : 2, - "offset" : 0 - }, { - "partition" : 1, - "offset" : 1 - }, { - "partition" : 2, - "offset" : 2 - } ] -} ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified topic partition was not found." -} ----- - - -===== Response 422 -[source,json] ----- -{ - "error_code" : 422, - "message" : "The record is not valid." -} ----- - - -[[_getpartition]] -=== GET /topics/{topicname}/partitions/{partitionid} - -==== Description -Retrieves partition metadata for the topic partition. 
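For illustration (placeholder host, topic, and partition ID), the partition metadata can be fetched from Python with the `requests` library:

[source,python]
----
# Illustrative sketch: fetch leader and replica details for one topic partition.
import requests

response = requests.get(
    "http://localhost:8080/topics/my-topic/partitions/0",
    headers={"Accept": "application/vnd.kafka.v2+json"},
)
response.raise_for_status()
metadata = response.json()
print(metadata["leader"], [r["broker"] for r in metadata["replicas"]])
----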
- - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**partitionid** + -__required__|ID of the partition to send records to or retrieve metadata from.|integer -|**Path**|**topicname** + -__required__|Name of the topic to send records to or retrieve metadata from.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|Partition metadata|<<_partitionmetadata,PartitionMetadata>> -|**404**|The specified topic partition was not found.|<<_error,Error>> -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Topics - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "partition" : 1, - "leader" : 1, - "replicas" : [ { - "broker" : 1, - "leader" : true, - "in_sync" : true - }, { - "broker" : 2, - "leader" : false, - "in_sync" : true - } ] -} ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified topic partition was not found." -} ----- - - -[[_getoffsets]] -=== GET /topics/{topicname}/partitions/{partitionid}/offsets - -==== Description -Retrieves a summary of the offsets for the topic partition. - - -==== Parameters - -[options="header", cols=".^2a,.^3a,.^9a,.^4a"] -|=== -|Type|Name|Description|Schema -|**Path**|**partitionid** + -__required__|ID of the partition.|integer -|**Path**|**topicname** + -__required__|Name of the topic containing the partition.|string -|=== - - -==== Responses - -[options="header", cols=".^2a,.^14a,.^4a"] -|=== -|HTTP Code|Description|Schema -|**200**|A summary of the offsets for the topic partition.|<<_offsetssummary,OffsetsSummary>> -|**404**|The specified topic partition was not found.|<<_error,Error>> -|=== - - -==== Produces - -* `application/vnd.kafka.v2+json` - - -==== Tags - -* Topics - - -==== Example HTTP response - -===== Response 200 -[source,json] ----- -{ - "beginning_offset" : 10, - "end_offset" : 50 -} ----- - - -===== Response 404 -[source,json] ----- -{ - "error_code" : 404, - "message" : "The specified topic partition was not found." 
-} ----- - - - diff --git a/documentation/book/api/security.adoc b/documentation/book/api/security.adoc deleted file mode 100644 index 139597f9c..000000000 --- a/documentation/book/api/security.adoc +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/documentation/book/api/snippet/GET/http-response.adoc b/documentation/book/api/snippet/GET/http-response.adoc new file mode 100644 index 000000000..1c1a0a0a0 --- /dev/null +++ b/documentation/book/api/snippet/GET/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +[subs=attributes+] +---- +{ + "bridge_version" : {ProductVersion} +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/POST/http-request.adoc new file mode 100644 index 000000000..a6aff0391 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/POST/http-request.adoc @@ -0,0 +1,15 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "name" : "consumer1", + "format" : "binary", + "auto.offset.reset" : "earliest", + "enable.auto.commit" : false, + "fetch.min.bytes" : 512, + "consumer.request.timeout.ms" : 30000, + "isolation.level" : "read_committed" +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/POST/http-response.adoc new file mode 100644 index 000000000..de1717128 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/POST/http-response.adoc @@ -0,0 +1,30 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "instance_id" : "consumer1", + "base_uri" : "http://localhost:8080/consumers/my-group/instances/consumer1" +} +---- + + +===== Response 409 +[source,json] +---- +{ + "error_code" : 409, + "message" : "A consumer instance with the specified name already exists in the Kafka Bridge." +} +---- + + +===== Response 422 +[source,json] +---- +{ + "error_code" : 422, + "message" : "One or more consumer configuration options have invalid values." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/DELETE/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/DELETE/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/DELETE/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-request.adoc new file mode 100644 index 000000000..268640ca6 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-request.adoc @@ -0,0 +1,15 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "partitions" : [ { + "topic" : "topic", + "partition" : 0 + }, { + "topic" : "topic", + "partition" : 1 + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-response.adoc new file mode 100644 index 000000000..7ce00de2b --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/assignments/POST/http-response.adoc @@ -0,0 +1,20 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- + + +===== Response 409 +[source,json] +---- +{ + "error_code" : 409, + "message" : "Subscriptions to topics, partitions, and patterns are mutually exclusive." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-request.adoc new file mode 100644 index 000000000..e7da94f3f --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-request.adoc @@ -0,0 +1,17 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "offsets" : [ { + "topic" : "topic", + "partition" : 0, + "offset" : 15 + }, { + "topic" : "topic", + "partition" : 1, + "offset" : 42 + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/offsets/POST/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-request.adoc new file mode 100644 index 000000000..e7da94f3f --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-request.adoc @@ -0,0 +1,17 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "offsets" : [ { + "topic" : "topic", + "partition" : 0, + "offset" : 15 + }, { + "topic" : "topic", + "partition" : 1, + "offset" : 42 + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/POST/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-request.adoc new file mode 100644 index 000000000..268640ca6 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-request.adoc @@ -0,0 +1,15 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "partitions" : [ { + "topic" : "topic", + "partition" : 0 + }, { + "topic" : "topic", + "partition" : 1 + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/beginning/POST/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-request.adoc new file mode 100644 index 000000000..268640ca6 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-request.adoc @@ -0,0 +1,15 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "partitions" : [ { + "topic" : "topic", + "partition" : 0 + }, { + "topic" : "topic", + "partition" : 1 + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/positions/end/POST/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/records/GET/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/records/GET/http-response.adoc new file mode 100644 index 000000000..efc9ae62e --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/records/GET/http-response.adoc @@ -0,0 +1,71 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +[ { + "topic" : "topic", + "key" : "key1", + "value" : { + "foo" : "bar" + }, + "partition" : 0, + "offset" : 2 +}, { + "topic" : "topic", + "key" : "key2", + "value" : [ "foo2", "bar2" ], + "partition" : 1, + "offset" : 3 +} ] +---- + +[source,json] +---- +[ + { + "topic": "test", + "key": "a2V5", + "value": "Y29uZmx1ZW50", + "partition": 1, + "offset": 100, + }, + { + "topic": "test", + "key": "a2V5", + "value": "a2Fma2E=", + "partition": 2, + "offset": 101, + } +] +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- + + +===== Response 406 +[source,json] +---- +{ + "error_code" : 406, + "message" : "The `format` used in the consumer creation request does not match the embedded format in the Accept header of this request." +} +---- + + +===== Response 422 +[source,json] +---- +{ + "error_code" : 422, + "message" : "Response exceeds the maximum number of bytes the consumer can receive" +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/DELETE/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/DELETE/http-response.adoc new file mode 100644 index 000000000..ef1420574 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/DELETE/http-response.adoc @@ -0,0 +1,10 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/GET/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/GET/http-response.adoc new file mode 100644 index 000000000..d979220ba --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/GET/http-response.adoc @@ -0,0 +1,24 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "topics" : [ "my-topic1", "my-topic2" ], + "partitions" : [ { + "my-topic1" : [ 1, 2, 3 ] + }, { + "my-topic2" : [ 1 ] + } ] +} +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-request.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-request.adoc new file mode 100644 index 000000000..7a7ed20ec --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-request.adoc @@ -0,0 +1,9 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "topics" : [ "topic1", "topic2" ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-response.adoc b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-response.adoc new file mode 100644 index 000000000..1944b93e0 --- /dev/null +++ b/documentation/book/api/snippet/consumers/{groupid}/instances/{name}/subscription/POST/http-response.adoc @@ -0,0 +1,30 @@ +==== Example HTTP response + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified consumer instance was not found." +} +---- + + +===== Response 409 +[source,json] +---- +{ + "error_code" : 409, + "message" : "Subscriptions to topics, partitions, and patterns are mutually exclusive." +} +---- + + +===== Response 422 +[source,json] +---- +{ + "error_code" : 422, + "message" : "A list (of Topics type) or a topic_pattern must be specified." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/GET/http-response.adoc b/documentation/book/api/snippet/topics/GET/http-response.adoc new file mode 100644 index 000000000..ce5fbfc1a --- /dev/null +++ b/documentation/book/api/snippet/topics/GET/http-response.adoc @@ -0,0 +1,7 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +[ "topic1", "topic2" ] +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/GET/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/GET/http-response.adoc new file mode 100644 index 000000000..29c6df164 --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/GET/http-response.adoc @@ -0,0 +1,38 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "name" : "topic", + "offset" : 2, + "configs" : { + "cleanup.policy" : "compact" + }, + "partitions" : [ { + "partition" : 1, + "leader" : 1, + "replicas" : [ { + "broker" : 1, + "leader" : true, + "in_sync" : true + }, { + "broker" : 2, + "leader" : false, + "in_sync" : true + } ] + }, { + "partition" : 2, + "leader" : 2, + "replicas" : [ { + "broker" : 1, + "leader" : false, + "in_sync" : true + }, { + "broker" : 2, + "leader" : true, + "in_sync" : true + } ] + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/POST/http-request.adoc b/documentation/book/api/snippet/topics/{topicname}/POST/http-request.adoc new file mode 100644 index 000000000..d791b877f --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/POST/http-request.adoc @@ -0,0 +1,17 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "records" : [ { + "key" : "key1", + "value" : "value1" + }, { + "value" : "value2", + "partition" : 1 + }, { + "value" : "value3" + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/POST/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/POST/http-response.adoc new file mode 100644 index 000000000..fc9c1f91b --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/POST/http-response.adoc @@ -0,0 +1,38 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "offsets" : [ { + "partition" : 2, + "offset" : 0 + }, { + "partition" : 1, + "offset" : 1 + }, { + "partition" : 2, + "offset" : 2 + } ] +} +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified topic was not found." +} +---- + + +===== Response 422 +[source,json] +---- +{ + "error_code" : 422, + "message" : "The record list contains invalid records." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/partitions/GET/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/partitions/GET/http-response.adoc new file mode 100644 index 000000000..a5e074faa --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/partitions/GET/http-response.adoc @@ -0,0 +1,41 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +[ { + "partition" : 1, + "leader" : 1, + "replicas" : [ { + "broker" : 1, + "leader" : true, + "in_sync" : true + }, { + "broker" : 2, + "leader" : false, + "in_sync" : true + } ] +}, { + "partition" : 2, + "leader" : 2, + "replicas" : [ { + "broker" : 1, + "leader" : false, + "in_sync" : true + }, { + "broker" : 2, + "leader" : true, + "in_sync" : true + } ] +} ] +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified topic was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/GET/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/GET/http-response.adoc new file mode 100644 index 000000000..5750d0031 --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/GET/http-response.adoc @@ -0,0 +1,29 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "partition" : 1, + "leader" : 1, + "replicas" : [ { + "broker" : 1, + "leader" : true, + "in_sync" : true + }, { + "broker" : 2, + "leader" : false, + "in_sync" : true + } ] +} +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified topic partition was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-request.adoc b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-request.adoc new file mode 100644 index 000000000..ac1c2d120 --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-request.adoc @@ -0,0 +1,14 @@ +==== Example HTTP request + +===== Request body +[source,json] +---- +{ + "records" : [ { + "key" : "key1", + "value" : "value1" + }, { + "value" : "value2" + } ] +} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-response.adoc new file mode 100644 index 000000000..e65ab5c6f --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/POST/http-response.adoc @@ -0,0 +1,38 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "offsets" : [ { + "partition" : 2, + "offset" : 0 + }, { + "partition" : 1, + "offset" : 1 + }, { + "partition" : 2, + "offset" : 2 + } ] +} +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified topic partition was not found." +} +---- + + +===== Response 422 +[source,json] +---- +{ + "error_code" : 422, + "message" : "The record is not valid." 
+} +---- \ No newline at end of file diff --git a/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/offsets/GET/http-response.adoc b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/offsets/GET/http-response.adoc new file mode 100644 index 000000000..7d36a6c82 --- /dev/null +++ b/documentation/book/api/snippet/topics/{topicname}/partitions/{partitionid}/offsets/GET/http-response.adoc @@ -0,0 +1,20 @@ +==== Example HTTP response + +===== Response 200 +[source,json] +---- +{ + "beginning_offset" : 10, + "end_offset" : 50 +} +---- + + +===== Response 404 +[source,json] +---- +{ + "error_code" : 404, + "message" : "The specified topic partition was not found." +} +---- \ No newline at end of file diff --git a/documentation/book/api/template/index.mustache b/documentation/book/api/template/index.mustache new file mode 100644 index 000000000..d5c99f496 --- /dev/null +++ b/documentation/book/api/template/index.mustache @@ -0,0 +1,133 @@ +{{#headerAttributes}} +:toc: left +:numbered: +:toclevels: 4 +:source-highlighter: highlightjs +:keywords: openapi, rest, {{appName}} +:specDir: {{specDir}} +:snippetDir: {{snippetDir}} +:generator-template: v1 2019-12-20 +:info-url: {{infoUrl}} +:app-name: {{appName}} +{{/headerAttributes}} + += {{{appName}}} + +{{#useIntroduction}} +== Introduction +{{/useIntroduction}} +{{^useIntroduction}} +[abstract] +.Abstract +{{/useIntroduction}} +{{{appDescription}}} + +{{#specinclude}}intro.adoc{{/specinclude}} + +{{#hasAuthMethods}} +== Access + +{{#authMethods}} +{{#isBasic}} +{{#isBasicBasic}}* *HTTP Basic* Authentication _{{{name}}}_{{/isBasicBasic}} +{{#isBasicBearer}}* *Bearer* Authentication {{/isBasicBearer}} +{{#isHttpSignature}}* *HTTP signature* Authentication{{/isHttpSignature}} +{{/isBasic}} +{{#isOAuth}}* *OAuth* AuthorizationUrl: _{{authorizationUrl}}_, TokenUrl: _{{tokenUrl}}_ {{/isOAuth}} +{{#isApiKey}}* *APIKey* KeyParamName: _{{keyParamName}}_, KeyInQuery: _{{isKeyInQuery}}_, KeyInHeader: _{{isKeyInHeader}}_{{/isApiKey}} +{{/authMethods}} + +{{/hasAuthMethods}} + +== Endpoints + +{{#apiInfo}} +{{#apis}} +{{#operations}} + +[.{{baseName}}] +=== {{baseName}} + +{{#operation}} + +[.{{nickname}}] +{{#useMethodAndPath}} +==== {{httpMethod}} {{path}} + +Operation Id:: {{nickname}} + +{{/useMethodAndPath}} +{{^useMethodAndPath}} +==== {{nickname}} + +`{{httpMethod}} {{path}}` +{{/useMethodAndPath}} + +{{{summary}}} + +===== Description + +{{{notes}}} + +{{#specinclude}}{{path}}/{{httpMethod}}/spec.adoc{{/specinclude}} + + +{{> params}} + +===== Return Type + + +{{#hasReference}} +{{^returnSimpleType}}{{returnContainer}}[{{/returnSimpleType}}<<{{returnBaseType}}>>{{^returnSimpleType}}]{{/returnSimpleType}} +{{/hasReference}} + +{{^hasReference}} +{{#returnType}}<<{{.}}>>{{/returnType}} +{{^returnType}}-{{/returnType}} +{{/hasReference}} + +{{#hasProduces}} +===== Content Type + +{{#produces}} +* {{{mediaType}}} +{{/produces}} +{{/hasProduces}} + +===== Responses + +.HTTP Response Codes +[cols="2,3,1"] +|=== +| Code | Message | Datatype + +{{#responses}} + +| {{code}} +| {{message}} +| {{#containerType}}{{dataType}}[<<{{baseType}}>>]{{/containerType}} {{^containerType}}<<{{dataType}}>>{{/containerType}} + +{{/responses}} +|=== + +{{^skipExamples}} +===== Samples + +{{#snippetinclude}}{{path}}/{{httpMethod}}/http-request.adoc{{/snippetinclude}} +{{#snippetinclude}}{{path}}/{{httpMethod}}/http-response.adoc{{/snippetinclude}} + +{{#snippetlink}}* wiremock data, 
{{path}}/{{httpMethod}}/{{httpMethod}}.json{{/snippetlink}} +{{/skipExamples}} + +ifdef::internal-generation[] +===== Implementation +{{#specinclude}}{{path}}/{{httpMethod}}/implementation.adoc{{/specinclude}} + +endif::internal-generation[] + +{{/operation}} +{{/operations}} +{{/apis}} +{{/apiInfo}} + +{{> model}} \ No newline at end of file diff --git a/documentation/book/bridge.adoc b/documentation/book/bridge.adoc index 1a42fcaeb..81bf9878f 100644 --- a/documentation/book/bridge.adoc +++ b/documentation/book/bridge.adoc @@ -12,12 +12,6 @@ include::assemblies/assembly-kafka-bridge-quickstart.adoc[leveloffset=+1] include::assemblies/assembly-kafka-bridge-config.adoc[leveloffset=+1] [id='api_reference-{context}'] -include::api/overview.adoc[leveloffset=+1] - -include::api/definitions.adoc[leveloffset=+1] - -include::api/paths.adoc[leveloffset=+1] - -include::api/security.adoc[leveloffset=+1] +include::api/index.adoc[leveloffset=+1] include::common/revision-info.adoc[leveloffset=+1] diff --git a/documentation/modules/con-overview-open-api-spec-kafka-bridge.adoc b/documentation/modules/con-overview-open-api-spec-kafka-bridge.adoc index 51409b089..c369f9a7f 100644 --- a/documentation/modules/con-overview-open-api-spec-kafka-bridge.adoc +++ b/documentation/modules/con-overview-open-api-spec-kafka-bridge.adoc @@ -13,7 +13,7 @@ The Kafka Bridge OpenAPI specification is in JSON format. You can find the OpenAPI JSON files in the `src/main/resources/` folder of the Kafka Bridge source download files. The download files are available from the {ReleaseDownload}. -You can also use the xref:_openapi[`GET /openapi` method] to retrieve the OpenAPI v2 specification in JSON format. +You can also use the xref:openapi[`GET /openapi` method] to retrieve the OpenAPI v2 specification in JSON format. [role="_additional-resources"] .Additional resources diff --git a/documentation/modules/proc-bridge-committing-consumer-offsets-to-log.adoc b/documentation/modules/proc-bridge-committing-consumer-offsets-to-log.adoc index ac804e87f..56839b2f5 100644 --- a/documentation/modules/proc-bridge-committing-consumer-offsets-to-log.adoc +++ b/documentation/modules/proc-bridge-committing-consumer-offsets-to-log.adoc @@ -6,7 +6,7 @@ = Commiting offsets to the log [role="_abstract"] -Use the xref:_commit[offsets] endpoint to manually commit offsets to the log for all messages received by the Kafka Bridge consumer. This is required because the Kafka Bridge consumer that you created earlier, in xref:proc-creating-kafka-bridge-consumer-{context}[Creating a Kafka Bridge consumer], was configured with the `enable.auto.commit` setting as `false`. +Use the xref:commit[offsets] endpoint to manually commit offsets to the log for all messages received by the Kafka Bridge consumer. This is required because the Kafka Bridge consumer that you created earlier, in xref:proc-creating-kafka-bridge-consumer-{context}[Creating a Kafka Bridge consumer], was configured with the `enable.auto.commit` setting as `false`. .Procedure @@ -17,7 +17,7 @@ Use the xref:_commit[offsets] endpoint to manually commit offsets to the log for curl -X POST http://localhost:8080/consumers/bridge-quickstart-consumer-group/instances/bridge-quickstart-consumer/offsets ---- + -Because no request body is submitted, offsets are committed for all the records that have been received by the consumer. 
Alternatively, the request body can contain an array (xref:_offsetcommitseeklist[OffsetCommitSeekList]) that specifies the topics and partitions that you want to commit offsets for. +Because no request body is submitted, offsets are committed for all the records that have been received by the consumer. Alternatively, the request body can contain an array of (xref:OffsetCommitSeek[OffsetCommitSeek]) that specifies the topics and partitions that you want to commit offsets for. + If the request is successful, the Kafka Bridge returns a `204` code only. @@ -28,4 +28,4 @@ After committing offsets to the log, try out the endpoints for xref:proc-bridge- [role="_additional-resources"] .Additional resources -* xref:_commit[POST /consumers/{groupid}/instances/{name}/offsets] +* xref:commit[POST /consumers/{groupid}/instances/{name}/offsets] diff --git a/documentation/modules/proc-bridge-deleting-consumer.adoc b/documentation/modules/proc-bridge-deleting-consumer.adoc index 65e1718b2..de54bb03d 100644 --- a/documentation/modules/proc-bridge-deleting-consumer.adoc +++ b/documentation/modules/proc-bridge-deleting-consumer.adoc @@ -10,7 +10,7 @@ Delete the Kafka Bridge consumer that you used throughout this quickstart. .Procedure -* Delete the Kafka Bridge consumer by sending a `DELETE` request to the xref:_deleteconsumer[instances] endpoint. +* Delete the Kafka Bridge consumer by sending a `DELETE` request to the xref:deleteconsumer[instances] endpoint. + [source,curl,subs=attributes+] ---- @@ -22,4 +22,4 @@ If the request is successful, the Kafka Bridge returns a `204` code. [role="_additional-resources"] .Additional resources -* xref:_deleteconsumer[DELETE /consumers/{groupid}/instances/{name}] +* xref:deleteconsumer[DELETE /consumers/{groupid}/instances/{name}] diff --git a/documentation/modules/proc-bridge-retrieving-latest-messages-from-consumer.adoc b/documentation/modules/proc-bridge-retrieving-latest-messages-from-consumer.adoc index ff915eac3..54d896f41 100644 --- a/documentation/modules/proc-bridge-retrieving-latest-messages-from-consumer.adoc +++ b/documentation/modules/proc-bridge-retrieving-latest-messages-from-consumer.adoc @@ -6,7 +6,7 @@ = Retrieving the latest messages from a Kafka Bridge consumer [role="_abstract"] -Retrieve the latest messages from the Kafka Bridge consumer by requesting data from the xref:_poll[records] endpoint. In production, HTTP clients can call this endpoint repeatedly (in a loop). +Retrieve the latest messages from the Kafka Bridge consumer by requesting data from the xref:poll[records] endpoint. In production, HTTP clients can call this endpoint repeatedly (in a loop). .Procedure @@ -58,4 +58,4 @@ After retrieving messages from a Kafka Bridge consumer, try xref:proc-bridge-com [role="_additional-resources"] .Additional resources -* xref:_poll[GET /consumers/{groupid}/instances/{name}/records] +* xref:poll[GET /consumers/{groupid}/instances/{name}/records] diff --git a/documentation/modules/proc-bridge-seeking-offsets-for-partition.adoc b/documentation/modules/proc-bridge-seeking-offsets-for-partition.adoc index 12ae55769..7b065b711 100644 --- a/documentation/modules/proc-bridge-seeking-offsets-for-partition.adoc +++ b/documentation/modules/proc-bridge-seeking-offsets-for-partition.adoc @@ -6,7 +6,7 @@ = Seeking to offsets for a partition [role="_abstract"] -Use the xref:_seek[positions] endpoints to configure the Kafka Bridge consumer to retrieve messages for a partition from a specific offset, and then from the latest offset. 
This is referred to in Apache Kafka as a seek operation. +Use the xref:seek[positions] endpoints to configure the Kafka Bridge consumer to retrieve messages for a partition from a specific offset, and then from the latest offset. This is referred to in Apache Kafka as a seek operation. .Procedure @@ -39,7 +39,7 @@ curl -X GET http://localhost:8080/consumers/bridge-quickstart-consumer-group/ins + The Kafka Bridge returns messages from the offset that you seeked to. -. Restore the default message retrieval behavior by seeking to the last offset for the same partition. This time, use the xref:_seektoend[positions/end] endpoint. +. Restore the default message retrieval behavior by seeking to the last offset for the same partition. This time, use the xref:seektoend[positions/end] endpoint. + [source,curl,subs=attributes+] ---- @@ -57,7 +57,7 @@ curl -X POST http://localhost:8080/consumers/bridge-quickstart-consumer-group/in + If the request is successful, the Kafka Bridge returns another `204` code. -NOTE: You can also use the xref:_seektobeginning[positions/beginning] endpoint to seek to the first offset for one or more partitions. +NOTE: You can also use the xref:seektobeginning[positions/beginning] endpoint to seek to the first offset for one or more partitions. .What to do next @@ -66,6 +66,6 @@ In this quickstart, you have used the Kafka Bridge to perform several common ope [role="_additional-resources"] .Additional resources -* xref:_seek[POST /consumers/{groupid}/instances/{name}/positions] -* xref:_seektobeginning[POST /consumers/{groupid}/instances/{name}/positions/beginning] -* xref:_seektoend[POST /consumers/{groupid}/instances/{name}/positions/end] +* xref:seek[POST /consumers/{groupid}/instances/{name}/positions] +* xref:seektobeginning[POST /consumers/{groupid}/instances/{name}/positions/beginning] +* xref:seektoend[POST /consumers/{groupid}/instances/{name}/positions/end] diff --git a/documentation/modules/proc-bridge-subscribing-consumer-topics.adoc b/documentation/modules/proc-bridge-subscribing-consumer-topics.adoc index 6c9b297b5..a663ca9b3 100644 --- a/documentation/modules/proc-bridge-subscribing-consumer-topics.adoc +++ b/documentation/modules/proc-bridge-subscribing-consumer-topics.adoc @@ -6,7 +6,7 @@ = Subscribing a Kafka Bridge consumer to topics [role="_abstract"] -After you have created a Kafka Bridge consumer, subscribe it to one or more topics by using the xref:_subscribe[subscription] endpoint. +After you have created a Kafka Bridge consumer, subscribe it to one or more topics by using the xref:subscribe[subscription] endpoint. When subscribed, the consumer starts receiving all messages that are produced to the topic. .Procedure @@ -39,4 +39,4 @@ After subscribing a Kafka Bridge consumer to topics, you can xref:proc-bridge-re [role="_additional-resources"] .Additional resources -* xref:_subscribe[POST /consumers/{groupid}/instances/{name}/subscription] +* xref:subscribe[POST /consumers/{groupid}/instances/{name}/subscription] diff --git a/documentation/modules/proc-creating-kafka-bridge-consumer.adoc b/documentation/modules/proc-creating-kafka-bridge-consumer.adoc index 3f44ce617..a3cf5bb0c 100644 --- a/documentation/modules/proc-creating-kafka-bridge-consumer.adoc +++ b/documentation/modules/proc-creating-kafka-bridge-consumer.adoc @@ -6,7 +6,7 @@ = Creating a Kafka Bridge consumer [role="_abstract"] -Before you can perform any consumer operations in the Kafka cluster, you must first create a consumer by using the xref:_createconsumer[consumers] endpoint. 
The consumer is referred to as a __Kafka Bridge consumer__. +Before you can perform any consumer operations in the Kafka cluster, you must first create a consumer by using the xref:createconsumer[consumers] endpoint. The consumer is referred to as a __Kafka Bridge consumer__. .Procedure @@ -52,4 +52,4 @@ Now that you have created a Kafka Bridge consumer, you can xref:proc-bridge-sub [role="_additional-resources"] .Additional resources -* xref:_createconsumer[POST /consumers/{groupid}] +* xref:createconsumer[POST /consumers/{groupid}] diff --git a/documentation/modules/proc-producing-messages-from-bridge-topics-partitions.adoc b/documentation/modules/proc-producing-messages-from-bridge-topics-partitions.adoc index 8c6f032cd..685d41731 100644 --- a/documentation/modules/proc-producing-messages-from-bridge-topics-partitions.adoc +++ b/documentation/modules/proc-producing-messages-from-bridge-topics-partitions.adoc @@ -8,9 +8,9 @@ [role="_abstract"] Use the Kafka Bridge to produce messages to a Kafka topic in JSON format by using the topics endpoint. -You can produce messages to topics in JSON format by using the xref:_send[topics] endpoint. +You can produce messages to topics in JSON format by using the xref:send[topics] endpoint. You can specify destination partitions for messages in the request body. -The xref:_sendtopartition[partitions] endpoint provides an alternative method for specifying a single destination partition for all messages as a path parameter. +The xref:sendtopartition[partitions] endpoint provides an alternative method for specifying a single destination partition for all messages as a path parameter. In this procedure, messages are produced to a topic called `bridge-quickstart-topic`. @@ -372,5 +372,5 @@ After producing messages to topics and partitions, xref:proc-creating-kafka-brid [role="_additional-resources"] .Additional resources -* xref:_send[POST /topics/{topicname}] -* xref:_sendtopartition[POST /topics/{topicname}/partitions/{partitionid}] +* xref:send[POST /topics/{topicname}] +* xref:sendtopartition[POST /topics/{topicname}/partitions/{partitionid}] diff --git a/pom.xml b/pom.xml index 4d4996079..f96b2fdec 100644 --- a/pom.xml +++ b/pom.xml @@ -120,8 +120,7 @@ 3.3.0 3.0.1 1.7.0 - 1.3.7 - 1.3.4 + 7.8.0 2.16.1 2.16.1 4.7.3 @@ -419,17 +418,6 @@ - - - - false - - jcenter-releases - jcenter - https://jcenter.bintray.com - - - @@ -667,29 +655,30 @@ - io.github.swagger2markup - swagger2markup-maven-plugin - ${swagger2markup-plugin.version} - - - io.github.swagger2markup - swagger2markup - ${swagger2markup.version} - - - - ${project.basedir}/src/main/resources/openapiv2.json - ${project.basedir}/documentation/book/api/ - - ASCIIDOC - - + org.openapitools + openapi-generator-maven-plugin + ${openapi.generator.version} generate-apidoc - convertSwagger2markup + generate + + ${project.basedir}/src/main/resources/openapi.json + ${project.basedir}/documentation/book/api/ + ${project.basedir}/documentation/book/api/template/ + asciidoc + false + false + + true + false + true + true + ./documentation/book/api/snippet/ + + diff --git a/src/main/resources/openapi.json b/src/main/resources/openapi.json index e617327a4..504a2e87b 100644 --- a/src/main/resources/openapi.json +++ b/src/main/resources/openapi.json @@ -1532,17 +1532,7 @@ }, "properties": { "key": { - "oneOf": [ - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "string" - } - ] + "$ref": "#/components/schemas/RecordKey" }, "offset": { "format": "int64", @@ -1556,16 +1546,7 @@ "type": "string" }, 
"value": { - "oneOf": [ - { - "type": "object", - "nullable": true - }, - { - "type": "string" - } - ], - "nullable" : true + "$ref": "#/components/schemas/RecordValue" }, "headers": { "$ref": "#/components/schemas/KafkaHeaderList" @@ -1860,32 +1841,10 @@ "type": "integer" }, "value": { - "oneOf": [ - { - "type": "array" - }, - { - "type": "object", - "nullable": true - }, - { - "type": "string" - } - ], - "nullable" : true + "$ref": "#/components/schemas/RecordValue" }, "key": { - "oneOf": [ - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "string" - } - ] + "$ref": "#/components/schemas/RecordKey" }, "headers": { "$ref": "#/components/schemas/KafkaHeaderList" @@ -1945,30 +1904,10 @@ "type": "object", "properties": { "value": { - "oneOf": [ - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "string" - } - ] + "$ref": "#/components/schemas/RecordValue" }, "key": { - "oneOf": [ - { - "type": "array" - }, - { - "type": "object" - }, - { - "type": "string" - } - ] + "$ref": "#/components/schemas/RecordKey" }, "headers": { "$ref": "#/components/schemas/KafkaHeaderList" @@ -2220,6 +2159,40 @@ "example": { "bridge_version": "0.16.0" } + }, + "RecordKey": { + "title": "RecordKey", + "description": "Key representation for a record. It can be an array, a JSON object or a string", + "oneOf": [ + { + "type": "array", + "items": {} + }, + { + "type": "object" + }, + { + "type": "string" + } + ] + }, + "RecordValue": { + "title": "RecordValue", + "description": "Value representation for a record. It can be an array, a JSON object or a string", + "oneOf": [ + { + "type": "array", + "items": {} + }, + { + "type": "object", + "nullable": true + }, + { + "type": "string" + } + ], + "nullable" : true } } }, diff --git a/src/main/resources/openapiv2.json b/src/main/resources/openapiv2.json index c5f6430a0..a5e4eab72 100644 --- a/src/main/resources/openapiv2.json +++ b/src/main/resources/openapiv2.json @@ -1747,7 +1747,8 @@ "type": [ "array", "object", - "string" + "string", + "null" ] }, "key": { @@ -2007,6 +2008,25 @@ "example": { "bridge_version": "0.16.0" } + }, + "RecordKey": { + "title": "RecordKey", + "description": "Key representation for a record. It can be an array, a JSON object or a string", + "type": [ + "array", + "object", + "string" + ] + }, + "RecordValue": { + "title": "RecordValue", + "description": "Value representation for a record. It can be an array, a JSON object or a string", + "type": [ + "array", + "object", + "string", + "null" + ] } }, "tags": [ diff --git a/src/test/java/io/strimzi/kafka/bridge/http/OtherServicesIT.java b/src/test/java/io/strimzi/kafka/bridge/http/OtherServicesIT.java index 98e5e8303..7586ea7cb 100644 --- a/src/test/java/io/strimzi/kafka/bridge/http/OtherServicesIT.java +++ b/src/test/java/io/strimzi/kafka/bridge/http/OtherServicesIT.java @@ -160,7 +160,7 @@ void openapiTest(VertxTestContext context) { assertThat(paths.containsKey("/"), is(true)); assertThat(bridgeResponse.getJsonObject("paths").getJsonObject("/").getJsonObject("get").getString("operationId"), is(HttpOpenApiOperations.INFO.toString())); assertThat(paths.containsKey("/karel"), is(false)); - assertThat(bridgeResponse.getJsonObject("definitions").getMap().size(), is(25)); + assertThat(bridgeResponse.getJsonObject("definitions").getMap().size(), is(27)); assertThat(bridgeResponse.getJsonArray("tags").size(), is(4)); }); context.completeNow();