@@ -534,6 +534,7 @@ mod tests {
     use std::fs::File;
     use std::sync::Arc;
 
+    use arrow_array::cast::AsArray;
     use arrow_array::{ArrayRef, Int32Array, Int64Array, RecordBatch, StringArray, StructArray};
     use arrow_schema::{DataType, Field, Fields};
     use parquet::arrow::{ArrowWriter, PARQUET_FIELD_ID_META_KEY};
@@ -685,12 +686,13 @@ mod tests {
         assert!(result.is_none()); // no pos dels for file 3
     }
 
-    /// Verifies that evolve_schema on partial-schema equality deletes fails with Arrow
-    /// validation errors when missing REQUIRED columns are filled with NULLs.
+    /// Verifies that evolve_schema on partial-schema equality deletes works correctly
+    /// when only equality_ids columns are evolved, not all table columns.
     ///
-    /// Reproduces the issue that caused 14 TestSparkReaderDeletes failures in Iceberg Java.
+    /// Per the [Iceberg spec](https://iceberg.apache.org/spec/#equality-delete-files),
+    /// equality delete files can contain only a subset of columns.
     #[tokio::test]
-    async fn test_partial_schema_equality_deletes_evolve_fails() {
+    async fn test_partial_schema_equality_deletes_evolve_succeeds() {
         let tmp_dir = TempDir::new().unwrap();
         let table_location = tmp_dir.path().as_os_str().to_str().unwrap();
 
@@ -750,23 +752,32 @@ mod tests {
             .await
             .unwrap();
 
-        let mut evolved_stream = BasicDeleteFileLoader::evolve_schema(batch_stream, table_schema)
-            .await
-            .unwrap();
+        // Only evolve the equality_ids columns (field 2), not all table columns
+        let equality_ids = vec![2];
+        let evolved_stream =
+            BasicDeleteFileLoader::evolve_schema(batch_stream, table_schema, &equality_ids)
+                .await
+                .unwrap();
 
-        let result = evolved_stream.next().await.unwrap();
+        let result = evolved_stream.try_collect::<Vec<_>>().await;
 
         assert!(
-            result.is_err(),
-            "Expected error from evolve_schema adding NULL to non-nullable column"
+            result.is_ok(),
+            "Expected success when evolving only equality_ids columns, got error: {:?}",
+            result.err()
         );
 
-        let err = result.unwrap_err();
-        let err_msg = err.to_string();
-        assert!(
-            err_msg.contains("non-nullable") || err_msg.contains("null values"),
-            "Expected null value error, got: {}",
-            err_msg
-        );
+        let batches = result.unwrap();
+        assert_eq!(batches.len(), 1);
+
+        let batch = &batches[0];
+        assert_eq!(batch.num_rows(), 3);
+        assert_eq!(batch.num_columns(), 1); // Only 'data' column
+
+        // Verify the actual values are preserved after schema evolution
+        let data_col = batch.column(0).as_string::<i32>();
+        assert_eq!(data_col.value(0), "a");
+        assert_eq!(data_col.value(1), "d");
+        assert_eq!(data_col.value(2), "g");
     }
 }