Closed
Description
I am getting this compilation error, even though I am only using Consumer and Message import.
Error compiling schema, function code: const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of 
vErrors){if((((({"str":"err4"}.keyword !== "errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + 
i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
<anonymous_script>:3
const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of vErrors){if((((({"str":"err4"}.keyword !== 
"errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: 
"string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
SyntaxError: Unexpected token ':'
at new Function (<anonymous>)
at Ajv2020.compileSchema (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/compile/index.js:89:30)
at Ajv2020._compileSchemaEnv (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:473:37)
at Ajv2020.compile (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:160:38)
at file:///Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/dist/clients/admin/options.js:79:47
at ModuleJobSync.runSync (node:internal/modules/esm/module_job:400:35)
at ModuleLoader.importSyncForRequire (node:internal/modules/esm/loader:427:47)
at loadESMFromCJS (node:internal/modules/cjs/loader:1561:24)
at Module._compile (node:internal/modules/cjs/loader:1712:5)
at Object..js (node:internal/modules/cjs/loader:1895:10)
How I am using it
import { Consumer, Message } from '@platformatic/kafka';
const consumer = new Consumer({
bootstrapBrokers: [envConfig.kafka.bootstrapServers],
sasl: {
mechanism: 'PLAIN',
username: envConfig.kafka.saslUsername,
password: envConfig.kafka.saslPassword,
},
clientId: envConfig.serviceName,
groupId: group,
autocommit: false,
autocreateTopics: false,
connectTimeout: 3000,
heartbeatInterval: 5000,
highWaterMark: 1024, // assuming max 1kb msg size, it will only take max on 1MB
isolationLevel: 'READ_COMMITTED',
minBytes: 500,
maxBytes: maxBytes,
maxInflights: 1000,
maxWaitTime: 5000,
retries: 3,
sessionTimeout: 35000,
});
consumer.addListener('consumer:group:join', args => console.log(args));
consumer.addListener('consumer:group:leave', args => console.log(args));
consumer.addListener('consumer:group:rejoin', args => console.log(args));
consumer.addListener('consumer:group:rebalance', args => console.log(args));
consumer.addListener('consumer:heartbeat:error', args => console.log(args));
const stream = await consumer.consume({
topics: [topic],
mode: 'latest',
});
let i = 0;
let flushing = false;
const batch: Message[] = Array(batchSize);
let timer: NodeJS.Timeout | undefined = undefined;
async function flush() {
if (flushing || i === 0) return;
i = 0;
flushing = true;
clearTimeout(timer);
stream.pause();
try {
await batchHandler(batch);
} catch (err) {
logger.error('batch failed—nothing committed %j', err);
} finally {
flushing = false;
stream.resume();
}
}
stream.on('data', message => {
batch[i++] = message;
if (i === 1) timer = setTimeout(flush, MAX_WAIT_MS);
else if (i >= batchSize) flush();
});
@platformatic/kafka: ^1.6.0
node version: v22.15.0
Metadata
Metadata
Assignees
Labels
No labels
Activity
mcollina commentedon Jun 20, 2025
Can you please include a self-contained example? I tried to adapt your example and I cannot reproduce. Create a repo including a docker-compose to spin up a kafka with the settings you are using.
Dhruv-Garg79 commentedon Jun 20, 2025
I am getting the same error when running as a standalone JS script.
Output:
I am using confluent kafka cluster for testing. And the package was installed using bun.
Can you try running the same script with the released version?
shumstra commentedon Jun 22, 2025
I am having a similar experience trying out the basic examples from the README.md within a unit test using Bun 1.2.13 as my runtime and a fresh install of Kafka 4.0.0 on macOS 15.4.
I'm using 1.6.0, but downgrading to 1.5.0, 1.4.0 or 1.3.0 yields similar results.
A bit of digging reveals that once line 79 of @platformatic/kafka/dist/clients/admin/options.js calls ajv.compile(), the ajv attempts to generate some JavaScript code, but ends up generating garbage. In particular, it generates this delightful nugget of insane JavaScript:
{"str":"emErrs0"}.push({"str":"err4"})
Line 79 of options.js runs immediately on import, so when this error triggers, it triggers no matter which part of the library you attempt to use.
Edit: ...so this might actually be a bug in the ajv package, which hasn't seen a publish for a year and hasn't had a PR merged since Dec 2024. Our options here seem to be:
A) figure out a workaround around ajv's code generation bug within this kafka package, or
B) switch to a different way of validating options objects passed to the library than an unmaintained code-generating JSON schema validator.
Dhruv-Garg79 commentedon Jun 23, 2025
I would prefer it not to have any validation or serialisation built in. It needs to provide us with a reliable and high-performance interface for the producer and consumer; everything else we can take care of.
Dhruv-Garg79 commentedon Jun 24, 2025
@ShogunPanda can you please check
ShogunPanda commentedon Jun 24, 2025
I tried to reproduce without success. I also specified
strict: true
My guess is that in your application another, incompatible, version of ajv
is being loaded. Can you please share the dependencies
and devDependencies
parts of your package.json?
Dhruv-Garg79 commented on Jun 24, 2025
you are right,
@eslint/eslintrc
is loading "ajv": "^6.12.4".
I am using Bun as the package manager, which does not support nested resolution as of now, so that's a bummer.
ShogunPanda commentedon Jun 24, 2025
I see. Unfortunately we don't support bun or its package manager. Closing this.
Dhruv-Garg79 commentedon Jun 24, 2025
This has little to do with Bun as a package manager. I get that other package managers can handle nested resolution.
Still, it would have been great if this library only handled kafka related stuff well, and users manually handle schema validation and other things themselves using whatever library they want.
Dhruv-Garg79 commentedon Jun 24, 2025
I also don't understand why this is causing a compilation issue.
Because we are also using typebox, which depends upon
ajv@v8
, but there are no issues with that.
mcollina commented on Jun 24, 2025
@Dhruv-Garg79 please create a repository including all files to reproduce this problem correctly. So far we have not been able to.
Dhruv-Garg79 commentedon Jun 25, 2025
@mcollina here's the repository to reproduce the issue - https://github.com/Dhruv-Garg79/platformic-kafka-sample
ShogunPanda commentedon Jun 25, 2025
Thanks for the repro.
I finally "found" the issue. It was not from ajv but ajv-errors (ajv-validator/ajv-errors#157), which hasn't been released in three years.
I removed the dependencies and this should fix the issue.
For the record, using
npm
,yarn
orpnpm
didn't trigger the issue; using bun install
did. So I assume they are doing something under the hood which reproduces it.
Once I release and test it, I'll close this issue. Thanks for your patience.
Dhruv-Garg79 commentedon Jun 25, 2025
@ShogunPanda thanks for fixing the issue. It helps a lot, I will also try to contribute to this amazing project in future.
ShogunPanda commentedon Jun 26, 2025
I can confirm updating to
@platformatic/kafka@1.7.0
fixes the issue.
shumstra commented on Jun 28, 2025
Thank you @ShogunPanda!! 😃