Skip to content

Compilation error in admin/options even though not used or imported #57

Closed
@Dhruv-Garg79

Description

@Dhruv-Garg79

I am getting this compilation error, even though I am only using the Consumer and Message imports.

Error compiling schema, function code: const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of 
vErrors){if((((({"str":"err4"}.keyword !== "errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + 
i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
<anonymous_script>:3
const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of vErrors){if((((({"str":"err4"}.keyword !== 
"errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: 
"string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                            

SyntaxError: Unexpected token ':'
    at new Function (<anonymous>)
    at Ajv2020.compileSchema (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/compile/index.js:89:30)
    at Ajv2020._compileSchemaEnv (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:473:37)
    at Ajv2020.compile (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:160:38)
    at file:///Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/dist/clients/admin/options.js:79:47
    at ModuleJobSync.runSync (node:internal/modules/esm/module_job:400:35)
    at ModuleLoader.importSyncForRequire (node:internal/modules/esm/loader:427:47)
    at loadESMFromCJS (node:internal/modules/cjs/loader:1561:24)
    at Module._compile (node:internal/modules/cjs/loader:1712:5)
    at Object..js (node:internal/modules/cjs/loader:1895:10)

How I am using it

import { Consumer, Message } from '@platformatic/kafka';

                const consumer = new Consumer({
			bootstrapBrokers: [envConfig.kafka.bootstrapServers],
			sasl: {
				mechanism: 'PLAIN',
				username: envConfig.kafka.saslUsername,
				password: envConfig.kafka.saslPassword,
			},
			clientId: envConfig.serviceName,
			groupId: group,
			autocommit: false,
			autocreateTopics: false,
			connectTimeout: 3000,
			heartbeatInterval: 5000,
			highWaterMark: 1024, // assuming max 1kb msg size, it will only take max on 1MB
			isolationLevel: 'READ_COMMITTED',
			minBytes: 500,
			maxBytes: maxBytes,
			maxInflights: 1000,
			maxWaitTime: 5000,
			retries: 3,
			sessionTimeout: 35000,
		});

		consumer.addListener('consumer:group:join', args => console.log(args));
		consumer.addListener('consumer:group:leave', args => console.log(args));
		consumer.addListener('consumer:group:rejoin', args => console.log(args));
		consumer.addListener('consumer:group:rebalance', args => console.log(args));
		consumer.addListener('consumer:heartbeat:error', args => console.log(args));

		const stream = await consumer.consume({
			topics: [topic],
			mode: 'latest',
		});

		let i = 0;
		let flushing = false;
		const batch: Message[] = Array(batchSize);
		let timer: NodeJS.Timeout | undefined = undefined;

		async function flush() {
			if (flushing || i === 0) return;

			i = 0;
			flushing = true;
			clearTimeout(timer);
			stream.pause();

			try {
				await batchHandler(batch);
			} catch (err) {
				logger.error('batch failed—nothing committed %j', err);
			} finally {
				flushing = false;
				stream.resume();
			}
		}

		stream.on('data', message => {
			batch[i++] = message;

			if (i === 1) timer = setTimeout(flush, MAX_WAIT_MS);
			else if (i >= batchSize) flush();
		});

@platformatic/kafka: ^1.6.0
node version: v22.15.0

Activity

mcollina

mcollina commented on Jun 20, 2025

@mcollina
Member

Can you please include a self-contained example? I tried to adapt your example and I cannot reproduce. Create a repo including a docker-compose to spin up a kafka with the settings you are using.

import { Consumer } from './src/index.ts'
import type { Message } from './src/index.ts'

const consumer = new Consumer({
  bootstrapBrokers: ['localhost:9092'],
  clientId: 'aaa',
  groupId: 'aaa',
  autocommit: false,
  autocreateTopics: false,
  connectTimeout: 3000,
  heartbeatInterval: 5000,
  highWaterMark: 1024, // assuming max 1kb msg size, it will only take max on 1MB
  isolationLevel: 'READ_COMMITTED',
  minBytes: 500,
  maxBytes: 1000000, // 1MB
  maxInflights: 1000,
  maxWaitTime: 5000,
  retries: 3,
  sessionTimeout: 35000,
})

consumer.addListener('consumer:group:join', args => console.log(args))
consumer.addListener('consumer:group:leave', args => console.log(args))
consumer.addListener('consumer:group:rejoin', args => console.log(args))
consumer.addListener('consumer:group:rebalance', args => console.log(args))
consumer.addListener('consumer:heartbeat:error', args => console.log(args))

const stream = await consumer.consume({
  topics: ['aaa'],
  mode: 'latest',
})

let i = 0
let flushing = false
const batch: Message[] = Array(42)
let timer: NodeJS.Timeout | undefined

async function flush () {
  if (flushing || i === 0) return

  i = 0
  flushing = true
  clearTimeout(timer)
  stream.pause()

  try {
    console.log('batch', batch.length, batch[0].value.toString())
  } finally {
    flushing = false
    stream.resume()
  }
}

stream.on('data', message => {
  batch[i++] = message

  if (i === 1) timer = setTimeout(flush, 1000)
  else if (i >= 42) flush()
})
Dhruv-Garg79

Dhruv-Garg79 commented on Jun 20, 2025

@Dhruv-Garg79
Author

I am getting the same error when running as a standalone JS script.

import { Consumer } from '@platformatic/kafka';

dotenv.config({ path: '.env' });

const consumer = new Consumer({
	bootstrapBrokers: [process.env.KAFKA_BOOTSTRAP_SERVERS],
	sasl: {
		mechanism: 'PLAIN',
		username: process.env.KAFKA_SASL_USERNAME,
		password: process.env.KAFKA_SASL_PASSWORD,
	},
	clientId: 'aaa',
	groupId: 'aaa',
	autocommit: false,
	autocreateTopics: false,
	connectTimeout: 3000,
	heartbeatInterval: 5000,
	highWaterMark: 1024, // assuming max 1kb msg size, it will only take max on 1MB
	isolationLevel: 'READ_COMMITTED',
	minBytes: 500,
	maxBytes: 1000000, // 1MB
	maxInflights: 1000,
	maxWaitTime: 5000,
	retries: 3,
	sessionTimeout: 35000,
});

consumer.addListener('consumer:group:join', args => console.log(args));
consumer.addListener('consumer:group:leave', args => console.log(args));
consumer.addListener('consumer:group:rejoin', args => console.log(args));
consumer.addListener('consumer:group:rebalance', args => console.log(args));
consumer.addListener('consumer:heartbeat:error', args => console.log(args));

const stream = await consumer.consume({
	topics: ['test_users'],
	mode: 'latest',
});

let i = 0;
let flushing = false;
const batch = Array(42);
let timer = undefined;

async function flush() {
	if (flushing || i === 0) return;

	i = 0;
	flushing = true;
	clearTimeout(timer);
	stream.pause();

	try {
		console.log('batch', batch.length, batch[0].value.toString());
	} finally {
		flushing = false;
		stream.resume();
	}
}

stream.on('data', message => {
	batch[i++] = message;

	if (i === 1) timer = setTimeout(flush, 1000);
	else if (i >= 42) flush();
});

Output:

❯ node testPlatformKafka.js
(node:73882) [MODULE_TYPELESS_PACKAGE_JSON] Warning: Module type of file:///Users/dhruv/Programs/nudge/api-server/testPlatformKafka.js is not specified and it doesn't parse as CommonJS.
Reparsing as ES module because module syntax was detected. This incurs a performance overhead.
To eliminate this warning, add "type": "module" to /Users/dhruv/Programs/nudge/api-server/package.json.
(Use `node --trace-warnings ...` to show where the warning was created)
Error compiling schema, function code: const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of 
vErrors){if((((({"str":"err4"}.keyword !== "errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + 
i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
<anonymous_script>:3
const schema40 = scope.schema[23];const pattern4 = scope.pattern[2];return function validate28(data, {instancePath="", parentData, parentDataProperty, rootData=data, dynamicAnchors={}}={}){let vErrors = null;let errors = 0;const evaluated0 = validate28.evaluated;if(evaluated0.dynamicProps){evaluated0.props = undefined;}if(evaluated0.dynamicItems){evaluated0.items = undefined;}if(data && typeof data == "object" && !Array.isArray(data)){for(const key0 in data){if(!((key0 === "states") || (key0 === "types"))){const err0 = {instancePath,schemaPath:"#/additionalProperties",keyword:"additionalProperties",params:{additionalProperty: key0},message:"must NOT have additional properties"};if(vErrors === null){vErrors = [err0];}else {vErrors.push(err0);}errors++;}}if(data.states !== undefined){let data0 = data.states;if(Array.isArray(data0)){if(data0.length < 0){const err1 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err1];}else {vErrors.push(err1);}errors++;}const len0 = data0.length;for(let i0=0; i0<len0; i0++){let data1 = data0[i0];if(typeof data1 !== "string"){const err2 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/type",keyword:"type",params:{type: "string"},message:"must be string"};if(vErrors === null){vErrors = [err2];}else {vErrors.push(err2);}errors++;}if(!(((((data1 === "PREPARING_REBALANCE") || (data1 === "COMPLETING_REBALANCE")) || (data1 === "STABLE")) || (data1 === "DEAD")) || (data1 === "EMPTY"))){const err3 = {instancePath:instancePath+"/states/" + i0,schemaPath:"#/properties/states/items/enum",keyword:"enum",params:{allowedValues: schema40.properties.states.items.enum},message:"must be equal to one of the allowed values"};if(vErrors === null){vErrors = [err3];}else {vErrors.push(err3);}errors++;}if(errors > 0){for(const err4 of vErrors){if((((({"str":"err4"}.keyword !== 
"errorMessage") && (!{"str":"err4"}.emUsed)) && (({"str":"err4"}.instancePath === instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) || (({"str":"err4"}.instancePath.indexOf(instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}) === 0) && ({"str":"err4"}.instancePath[instancePath+{"_items":["\"/states/\" + ",{"str":"i0"},""]}.length] === "/")))) && ({"str":"err4"}.schemaPath.indexOf("#/properties/states/items") === 0)) && ({"str":"err4"}.schemaPath["#/properties/states/items".length] === "/")){{"str":"emErrs0"}.push({"str":"err4"});{"str":"err4"}.emUsed = true;}}if({"str":"emErrs0"}.length){if(vErrors === null){vErrors = [{"str":"err5"}];}else {vErrors.push({"str":"err5"});}errors++;}const emErrs1 = [];for(const err6 of vErrors){if(!{"str":"err6"}.emUsed){{"str":"emErrs1"}.push({"str":"err6"});}}vErrors = emErrs1;errors = {"str":"emErrs1"}.length;}}}else {const err7 = {instancePath:instancePath+"/states",schemaPath:"#/properties/states/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err7];}else {vErrors.push(err7);}errors++;}}if(data.types !== undefined){let data2 = data.types;if(Array.isArray(data2)){if(data2.length < 0){const err8 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/minItems",keyword:"minItems",params:{limit: 0},message:"must NOT have fewer than 0 items"};if(vErrors === null){vErrors = [err8];}else {vErrors.push(err8);}errors++;}const len1 = data2.length;for(let i1=0; i1<len1; i1++){let data3 = data2[i1];if(typeof data3 === "string"){if(!pattern4.test(data3)){const err9 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/pattern",keyword:"pattern",params:{pattern: "^\\S+$"},message:"must match pattern \""+"^\\S+$"+"\""};if(vErrors === null){vErrors = [err9];}else {vErrors.push(err9);}errors++;}}else {const err10 = {instancePath:instancePath+"/types/" + i1,schemaPath:"#/properties/types/items/type",keyword:"type",params:{type: 
"string"},message:"must be string"};if(vErrors === null){vErrors = [err10];}else {vErrors.push(err10);}errors++;}}}else {const err11 = {instancePath:instancePath+"/types",schemaPath:"#/properties/types/type",keyword:"type",params:{type: "array"},message:"must be array"};if(vErrors === null){vErrors = [err11];}else {vErrors.push(err11);}errors++;}}}else {const err12 = {instancePath,schemaPath:"#/type",keyword:"type",params:{type: "object"},message:"must be object"};if(vErrors === null){vErrors = [err12];}else {vErrors.push(err12);}errors++;}validate28.errors = vErrors;return errors === 0;}
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                            

SyntaxError: Unexpected token ':'
    at new Function (<anonymous>)
    at Ajv2020.compileSchema (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/compile/index.js:89:30)
    at Ajv2020._compileSchemaEnv (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:473:37)
    at Ajv2020.compile (/Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/node_modules/ajv/dist/core.js:160:38)
    at file:///Users/dhruv/Programs/nudge/api-server/node_modules/@platformatic/kafka/dist/clients/admin/options.js:79:47
    at ModuleJob.run (node:internal/modules/esm/module_job:274:25)
    at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26)
    at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:116:5)

Node.js v22.15.0

I am using confluent kafka cluster for testing. And the package was installed using bun.
Can you try running the same script with the released version?

shumstra

shumstra commented on Jun 22, 2025

@shumstra

I am having a similar experience trying out the basic examples from the README.md within a unit test using Bun 1.2.13 as my runtime and a fresh install of Kafka 4.0.0 on macOS 15.4.

I'm using 1.6.0, but downgrading to 1.5.0, 1.4.0 or 1.3.0 yields similar results.

A bit of digging reveals that once line 79 of @platformatic/kafka/dist/clients/admin/options.js calls ajv.compile(), the ajv attempts to generate some JavaScript code, but ends up generating garbage. In particular, it generates this delightful nugget of insane JavaScript: {"str":"emErrs0"}.push({"str":"err4"}).

Line 79 of options.js runs immediately on import, so when this triggers, it triggers no matter which part of the library you attempt to use.

Edit: ...so this might actually be a bug in the ajv package, which hasn't seen a publish for a year and hasn't had a PR merged since Dec 2024. Our options here seem to be:

A) figure out a workaround around ajv's code generation bug within this kafka package, or
B) switch to a different way of validating options objects passed to the library than an unmaintained code-generating JSON schema validator.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 23, 2025

@Dhruv-Garg79
Author

I would prefer it not to have any validation or serialisation built in. It needs to provide us with a reliable and high-performance interface for the producer and consumer; everything else we can take care of.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 24, 2025

@Dhruv-Garg79
Author

@ShogunPanda can you please check

ShogunPanda

ShogunPanda commented on Jun 24, 2025

@ShogunPanda
Contributor

I tried to reproduce without success. I also specified strict: true. My guess is that in your application another, incompatible, version of ajv is being loaded. Can you please share the dependencies and devDependencies part of your package.json?

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 24, 2025

@Dhruv-Garg79
Author
"dependencies": {
		"@aws-sdk/client-kms": "^3.835.0",
		"@aws-sdk/client-lambda": "^3.835.0",
		"@aws-sdk/client-s3": "^3.835.0",
		"@aws-sdk/client-sqs": "^3.835.0",
		"@aws-sdk/s3-request-presigner": "^3.835.0",
		"@clickhouse/client": "^1.11.2",
		"@confluentinc/kafka-javascript": "^1.3.2",
		"@fast-csv/format": "^5.0.2",
		"@fast-csv/parse": "^5.0.2",
		"@mongodb-js/zstd": "^2.0.1",
		"@node-rs/xxhash": "^1.7.6",
		"@opentelemetry/api": "^1.9.0",
		"@opentelemetry/exporter-trace-otlp-grpc": "^0.202.0",
		"@opentelemetry/resources": "^2.0.1",
		"@opentelemetry/sdk-trace-node": "^2.0.1",
		"@opentelemetry/semantic-conventions": "^1.34.0",
		"@ovotech/avro-decimal": "^0.1.5",
		"@platformatic/kafka": "^1.6.0",
		"@sinclair/typebox": "^0.34.37",
		"aws4": "^1.13.2",
		"axios": "^1.10.0",
		"bullmq": "^5.56.0",
		"cassandra-driver": "^4.8.0",
		"dayjs": "^1.11.13",
		"dotenv": "^16.5.0",
		"firebase-admin": "^13.4.0",
		"ioredis": "^5.6.1",
		"jose": "^5.10.0",
		"jstat": "^1.9.6",
		"nodemailer": "^7.0.3",
		"pino": "^9.7.0",
		"pino-opentelemetry-transport": "^1.0.1",
		"pino-pretty": "^13.0.0",
		"postgres": "^3.4.7",
		"uWebSockets.js": "uNetworking/uWebSockets.js#v20.52.0"
	},
	"devDependencies": {
		"@swc/core": "^1.12.6",
		"@swc/jest": "^0.2.38",
		"@types/jest": "^29.5.14",
		"@types/node": "^22.15.32",
		"@types/nodemailer": "^6.4.17",
		"eslint": "^9.29.0",
		"husky": "^9.1.7",
		"jest": "^29.7.0",
		"lint-staged": "^16.1.2",
		"nodemon": "^3.1.10",
		"prettier": "^3.6.0",
		"skott": "^0.35.4",
		"typescript": "5.8.3",
		"typescript-eslint": "^8.35.0"
	}

you are right, @eslint/eslintrc is loading "ajv": "^6.12.4".
I am using bun as my package manager, which does not support nested resolution as of now, so that's a bummer.

ShogunPanda

ShogunPanda commented on Jun 24, 2025

@ShogunPanda
Contributor

I see. Unfortunately we don't support bun or its package manager. Closing this.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 24, 2025

@Dhruv-Garg79
Author

This has little to do with Bun as a package manager. I get that other package managers can handle nested resolution.
Still, it would have been great if this library only handled Kafka-related functionality, and users could handle schema validation and other concerns themselves using whatever library they want.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 24, 2025

@Dhruv-Garg79
Author

I also don't understand why this is causing a compilation issue, because we are also using typebox, which depends on ajv@v8, and there are no issues with that.

mcollina

mcollina commented on Jun 24, 2025

@mcollina
Member

@Dhruv-Garg79 please create a repository including all files to reproduce this problem correctly. So far we have not been able to.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 25, 2025

@Dhruv-Garg79
Author

@mcollina here's the repository to reproduce the issue - https://github.com/Dhruv-Garg79/platformic-kafka-sample

ShogunPanda

ShogunPanda commented on Jun 25, 2025

@ShogunPanda
Contributor

Thanks for the repro.
I finally "found" the issue. It was not from ajv but ajv-errors (ajv-validator/ajv-errors#157), which hasn't been released in three years.

I removed the dependencies and this should fix the issue.

For the record, using npm, yarn or pnpm didn't trigger the issue, using bun install did. So I assume they are doing something under the hood which reproduces it.

Once I have released and tested it, I'll close this issue. Thanks for your patience.

Dhruv-Garg79

Dhruv-Garg79 commented on Jun 25, 2025

@Dhruv-Garg79
Author

@ShogunPanda thanks for fixing the issue. It helps a lot, and I will also try to contribute to this amazing project in the future.

ShogunPanda

ShogunPanda commented on Jun 26, 2025

@ShogunPanda
Contributor

I can confirm updating to @platformatic/kafka@1.7.0 fixes the issue.

shumstra

shumstra commented on Jun 28, 2025

@shumstra

Thank you @ShogunPanda!! 😃

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

      Development

      Participants

      @mcollina@ShogunPanda@shumstra@Dhruv-Garg79

      Issue actions

        Compilation error in admin/options even though not used or imported · Issue #57 · platformatic/kafka