package com.snowflake.kafka.connector;

+import static com.snowflake.kafka.connector.Constants.KafkaConnectorConfigParams.SNOWFLAKE_TOPICS2TABLE_MAP;
import static org.awaitility.Awaitility.await;

+import com.fasterxml.jackson.databind.ObjectMapper;
import com.snowflake.kafka.connector.Constants.KafkaConnectorConfigParams;
+import com.snowflake.kafka.connector.internal.SnowflakeConnectionService;
+import com.snowflake.kafka.connector.internal.SnowflakeConnectionServiceFactory;
import com.snowflake.kafka.connector.internal.TestUtils;
import com.snowflake.kafka.connector.internal.streaming.FakeIngestClientSupplier;
import com.snowflake.kafka.connector.internal.streaming.FakeSnowflakeStreamingIngestClient;
+import com.snowflake.kafka.connector.internal.streaming.v2.StreamingClientManager;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.testutil.MockSchemaRegistry;
+import io.confluent.kafka.serializers.KafkaAvroSerializer;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
+import java.util.Properties;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.runtime.ConnectorConfig;
import org.apache.kafka.connect.sink.SinkConnector;
import org.apache.kafka.connect.storage.StringConverter;
import org.apache.kafka.connect.util.clusters.EmbeddedConnectCluster;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInstance;
2034
2135/** Base class for integration tests using an embedded Kafka Connect cluster. */
2236@ TestInstance (TestInstance .Lifecycle .PER_CLASS )
2337public abstract class ConnectClusterBaseIT {
2438
25- protected EmbeddedConnectCluster connectCluster ;
39+ static final String MOCK_SCHEMA_REGISTRY_URL = "mock://test-schema-registry" ;
40+ static final String SCHEMA_REGISTRY_SCOPE = "test-schema-registry" ;
41+ static final int PARTITION_COUNT = 1 ;
42+ static final int RECORD_COUNT = 100 ;
43+ static final int TOPIC_COUNT = 2 ;
44+ static final Integer TASK_NUMBER = 1 ;
45+
2646 protected final FakeIngestClientSupplier fakeClientSupplier = new FakeIngestClientSupplier ();
47+ protected final ObjectMapper objectMapper = new ObjectMapper ();
2748
28- static final Integer TASK_NUMBER = 1 ;
49+ protected String tableName ;
50+ protected String connectorName ;
51+ protected String topic0 ;
52+ protected String topic1 ;
53+ protected EmbeddedConnectCluster connectCluster ;
54+ protected SnowflakeConnectionService snowflake ;
2955
  @BeforeAll
  public void beforeAll() {
@@ -51,6 +77,34 @@ public void afterAll() {
    }
  }

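+  /** Creates the per-test topics, a random target table name, and a fresh Snowflake connection. */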
+  @BeforeEach
+  void before() {
+
+    tableName = TestUtils.randomTableName();
+    connectorName = String.format("%s_connector", tableName);
+    topic0 = tableName + "0";
+    topic1 = tableName + "1";
+    connectCluster.kafka().createTopic(topic0, PARTITION_COUNT);
+    connectCluster.kafka().createTopic(topic1, PARTITION_COUNT);
+    snowflake =
+        SnowflakeConnectionServiceFactory.builder()
+            .setProperties(TestUtils.transformProfileFileToConnectorConfiguration(false))
+            .noCaching()
+            .build();
+
+    StreamingClientManager.resetIngestClientSupplier();
+  }
+
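+  /** Drops the per-test topics, connector, table, and mock schema registry scope after each test. */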
+  @AfterEach
+  void after() {
+    connectCluster.kafka().deleteTopic(topic0);
+    connectCluster.kafka().deleteTopic(topic1);
+    connectCluster.deleteConnector(connectorName);
+    StreamingClientManager.resetIngestClientSupplier();
+    TestUtils.dropTable(tableName);
+    MockSchemaRegistry.dropScope(SCHEMA_REGISTRY_SCOPE);
+  }
+
  protected FakeSnowflakeStreamingIngestClient getOpenedFakeIngestClient(String connectorName) {
    await("channelsCreated")
        .atMost(Duration.ofSeconds(60))
@@ -115,6 +169,44 @@ protected final void waitForConnectorStopped(String connectorName) {
    }
  }

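+  /** Builds a producer with String keys and Avro values serialized via the mock schema registry. */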
+  protected KafkaProducer<String, Object> createAvroProducer() {
+    final Properties props = new Properties();
+    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, connectCluster.kafka().bootstrapServers());
+    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
+    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
+    props.put("schema.registry.url", MOCK_SCHEMA_REGISTRY_URL);
+    return new KafkaProducer<>(props, new StringSerializer(), createAvroSerializer());
+  }
+
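+  /** Creates a KafkaAvroSerializer bound to the shared in-memory MockSchemaRegistry scope. */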
+  protected KafkaAvroSerializer createAvroSerializer() {
+    final SchemaRegistryClient schemaRegistryClient =
+        MockSchemaRegistry.getClientForScope(SCHEMA_REGISTRY_SCOPE);
+    final KafkaAvroSerializer serializer = new KafkaAvroSerializer(schemaRegistryClient);
+    serializer.configure(Map.of("schema.registry.url", MOCK_SCHEMA_REGISTRY_URL), false);
+    return serializer;
+  }
+
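+  /** Connector config mapping both test topics onto the shared table, with error/DLQ and JMX settings. */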
+  protected Map<String, String> createConnectorConfig() {
+    final String topics = topic0 + "," + topic1;
+    final String topicsToTableMap = topic0 + ":" + tableName + "," + topic1 + ":" + tableName;
+
+    final Map<String, String> config = defaultProperties(topics, connectorName);
+    config.put(SNOWFLAKE_TOPICS2TABLE_MAP, topicsToTableMap);
+    config.put(ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG, StringConverter.class.getName());
+    config.put("value.converter.schemas.enable", "false");
+    config.put("errors.tolerance", "none");
+    config.put("errors.log.enable", "true");
+    config.put("errors.deadletterqueue.topic.name", "DLQ_TOPIC");
+    config.put("errors.deadletterqueue.topic.replication.factor", "1");
+    config.put("jmx", "true");
+    return config;
+  }
+
+  void sendTombstoneRecords(final String topic) {
+    // Send null tombstone
+    connectCluster.kafka().produce(topic, null);
+  }
+
  private FakeSnowflakeStreamingIngestClient getFakeSnowflakeStreamingIngestClient(
      String connectorName) {
    return fakeClientSupplier.getFakeIngestClients().stream()
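
Taken together, these helpers cover topic setup, connector configuration, Avro production against the mock schema registry, and assertions against the fake ingest client. Below is a minimal sketch of how a subclass test might use them; it is not part of this change, and the class name, test name, and JSON payload are illustrative placeholders.

import org.junit.jupiter.api.Test;

class PlainJsonIngestIT extends ConnectClusterBaseIT {

  @Test
  void ingestsJsonRecordsIntoSharedTable() {
    // Start the connector with the default config that maps both per-test topics onto one table.
    connectCluster.configureConnector(connectorName, createConnectorConfig());

    // Produce plain JSON records into the first per-test topic.
    for (int i = 0; i < RECORD_COUNT; i++) {
      connectCluster.kafka().produce(topic0, String.format("{\"id\": %d}", i));
    }

    // getOpenedFakeIngestClient() blocks until the connector has opened its channels and returns
    // the fake client that captured what would otherwise have been sent to Snowflake.
    FakeSnowflakeStreamingIngestClient client = getOpenedFakeIngestClient(connectorName);
    // ...assert on the rows captured by the fake client...
  }
}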