1 file changed: +14 -19 lines changed

@@ -332,25 +332,19 @@ pub async fn create_chunk(
     let dataset_config =
         DatasetConfiguration::from_json(dataset_org_plan_sub.dataset.server_configuration.clone());
 
-    let (upsert_chunks, non_upsert_chunks) = chunks
-        .iter()
-        .filter_map(|chunk| {
-            if !chunk.upsert_by_tracking_id.unwrap_or(false) {
-                let non_empty_tracking_id = chunk
-                    .tracking_id
-                    .clone()
-                    .filter(|tracking_id| !tracking_id.is_empty());
-                let new_chunk = ChunkReqPayload {
-                    tracking_id: non_empty_tracking_id,
-                    ..chunk.clone()
-                };
-
-                Some(new_chunk)
-            } else {
-                None
-            }
-        })
-        .partition(|chunk| chunk.upsert_by_tracking_id.unwrap_or(false));
+    let chunks = chunks.into_iter().map(|chunk| {
+        let non_empty_tracking_id = chunk
+            .tracking_id
+            .clone()
+            .filter(|tracking_id| !tracking_id.is_empty());
+        ChunkReqPayload {
+            tracking_id: non_empty_tracking_id,
+            ..chunk.clone()
+        }
+    });
+
+    let (upsert_chunks, non_upsert_chunks): (Vec<ChunkReqPayload>, Vec<ChunkReqPayload>) =
+        chunks.partition(|chunk| chunk.upsert_by_tracking_id.unwrap_or(false));
 
     let (non_upsert_chunk_ingestion_message, non_upsert_chunk_metadatas) = create_chunk_metadata(
         non_upsert_chunks,
@@ -359,6 +353,7 @@ pub async fn create_chunk(
         pool.clone(),
     )
     .await?;
+
     let (upsert_chunk_ingestion_message, upsert_chunk_metadatas) = create_chunk_metadata(
         upsert_chunks,
         dataset_org_plan_sub.dataset.id,
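
For context, the new code leans on Iterator::partition to split the mapped chunks by their upsert_by_tracking_id flag after normalizing empty tracking ids. Below is a minimal, self-contained sketch of the same pattern; it is not code from this PR, and the Item struct is a hypothetical stand-in for ChunkReqPayload.

// Sketch only: illustrates the map-then-partition pattern used in the diff.
// `Item` is a hypothetical struct standing in for ChunkReqPayload.
#[derive(Debug, Clone)]
struct Item {
    tracking_id: Option<String>,
    upsert_by_tracking_id: Option<bool>,
}

fn main() {
    let items = vec![
        Item { tracking_id: Some("a".to_string()), upsert_by_tracking_id: Some(true) },
        Item { tracking_id: Some(String::new()), upsert_by_tracking_id: None },
    ];

    // Normalize empty tracking ids to None, as the new code does with `filter`.
    let items = items.into_iter().map(|item| {
        let non_empty_tracking_id = item.tracking_id.clone().filter(|id| !id.is_empty());
        Item { tracking_id: non_empty_tracking_id, ..item }
    });

    // Split into upsert / non-upsert groups; unwrap_or(false) treats a missing
    // flag as "do not upsert by tracking id".
    let (upsert, non_upsert): (Vec<Item>, Vec<Item>) =
        items.partition(|item| item.upsert_by_tracking_id.unwrap_or(false));

    println!("upsert: {:?}", upsert);
    println!("non_upsert: {:?}", non_upsert);
}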