@@ -908,6 +908,21 @@ def workflow_kafka_source_rehydration(c: Composition) -> None:
    )

    start_time = time.time()
+    c.testdrive(
+        dedent(
+            f"""
+            $ kafka-create-topic topic=kafka-large
+            """))
+    for i in range(repeats):
+        c.testdrive(
+            dedent(
+                f"""
+                $ kafka-ingest format=bytes key-format=bytes key-terminator=: topic=kafka-large repeat={count}
+                key{i}A,key{i}${{kafka-ingest.iteration}}:value{i}A,${{kafka-ingest.iteration}}
+                """
+            )
+        )
+
    c.testdrive(
        dedent(
            f"""
@@ -916,9 +931,6 @@ def workflow_kafka_source_rehydration(c: Composition) -> None:
            > CREATE CONNECTION IF NOT EXISTS kafka_conn FOR KAFKA BROKER '${{testdrive.kafka-addr}}', SECURITY PROTOCOL = 'PLAINTEXT';
            > CREATE CONNECTION IF NOT EXISTS csr_conn FOR CONFLUENT SCHEMA REGISTRY URL '${{testdrive.schema-registry-url}}';

-            $ kafka-create-topic topic=kafka-large
-            $ kafka-ingest format=bytes key-format=bytes key-terminator=: topic=kafka-large repeat={count}
-            key0A,key${{kafka-ingest.iteration}}:value0A,${{kafka-ingest.iteration}}
            > CREATE SOURCE kafka_source
              IN CLUSTER cluster
              FROM KAFKA CONNECTION kafka_conn (TOPIC 'testdrive-kafka-large-${{testdrive.seed}}');
@@ -931,24 +943,14 @@ def workflow_kafka_source_rehydration(c: Composition) -> None:
            > CREATE VIEW kafka_source_cnt AS SELECT count(*) FROM kafka_source_tbl
            > CREATE DEFAULT INDEX on kafka_source_cnt
            > SELECT * FROM kafka_source_cnt
-            {count}
+            {count * repeats}
            """
        )
    )
-    for i in range(1, repeats):
-        c.testdrive(
-            dedent(
-                f"""
-                $ kafka-ingest format=bytes key-format=bytes key-terminator=: topic=kafka-large repeat={count}
-                key{i}A,key{i}${{kafka-ingest.iteration}}:value{i}A,${{kafka-ingest.iteration}}
-                > SELECT * FROM kafka_source_cnt
-                {count * (i + 1)}
-                """
-            )
-        )

    elapsed = time.time() - start_time
    print(f"initial ingestion took {elapsed} seconds")
+    time.sleep(30)

    with c.override(
        Materialized(
@@ -959,7 +961,16 @@ def workflow_kafka_source_rehydration(c: Composition) -> None:
            restart="on-failure",
            external_metadata_store=True,
            default_replication_factor=2,
-        )
+        ),
+        Testdrive(
+            materialize_url="postgres://materialize@mz_new:6875",
+            materialize_url_internal="postgres://materialize@mz_new:6877",
+            mz_service="mz_new",
+            materialize_params={"cluster": "cluster"},
+            no_reset=True,
+            seed=1,
+            default_timeout=DEFAULT_TIMEOUT,
+        ),
    ):
        c.up("mz_new")
        start_time = time.time()
@@ -974,6 +985,16 @@ def workflow_kafka_source_rehydration(c: Composition) -> None:
        elapsed = time.time() - start_time
        print(f"promotion took {elapsed} seconds")

+        for i in range(repeats):
+            c.testdrive(
+                dedent(
+                    f"""
+                    $ kafka-ingest format=bytes key-format=bytes key-terminator=: topic=kafka-large repeat={count}
+                    key{i}A,key{i}${{kafka-ingest.iteration}}:value{i}A,${{kafka-ingest.iteration}}
+                    """
+                )
+            )
+
        start_time = time.time()
        result = c.sql_query("SELECT 1", service="mz_new")
        elapsed = time.time() - start_time