3 changes: 3 additions & 0 deletions kernel/Cargo.toml
@@ -65,6 +65,8 @@ tokio = { version = "1.47", optional = true, features = ["rt-multi-thread"] }
 # both arrow versions below are optional and require object_store
 object_store = { version = "0.12.3", optional = true, features = ["aws", "azure", "gcp", "http"] }
 comfy-table = { version = "7.1", optional = true }
+# used for Float trait in stats computation
+num-traits = { version = "0.2", optional = true }
 
 # arrow 56
 [dependencies.arrow_56]
@@ -118,6 +120,7 @@ default-engine-base = [
     "arrow-expression",
     "futures",
     "need-arrow",
+    "num-traits",
     "tokio",
 ]
 # the default-engine-native-tls use the reqwest crate with default features which uses native-tls. if you want
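The `num-traits` crate is brought in for its `Float` trait, per the comment in the diff above. As a rough illustration of why a stats computation wants that trait, here is a minimal, hypothetical sketch (not this PR's actual `stats` module) of folding a nullable float column into min/max while skipping NaNs, generic over `f32`/`f64`:

```rust
use num_traits::Float;

/// Hypothetical helper (not from this PR): fold a nullable float column
/// into (min, max), skipping nulls and NaNs, generic over f32/f64 via
/// the `Float` trait.
fn min_max<T: Float>(values: impl IntoIterator<Item = Option<T>>) -> Option<(T, T)> {
    values
        .into_iter()
        .flatten() // drop None (null) entries
        .filter(|v| !v.is_nan()) // NaN must not poison min/max
        .fold(None, |acc, v| match acc {
            None => Some((v, v)),
            Some((lo, hi)) => Some((lo.min(v), hi.max(v))),
        })
}

fn main() {
    let col = vec![Some(2.5f64), None, Some(f64::NAN), Some(-1.0)];
    assert_eq!(min_max(col), Some((-1.0, 2.5)));
}
```

Making the dependency optional and gating it behind the `default-engine-base` feature keeps it out of builds that don't compile the default engine.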
1 change: 1 addition & 0 deletions kernel/src/engine/default/mod.rs
@@ -33,6 +33,7 @@ pub mod file_stream;
 pub mod filesystem;
 pub mod json;
 pub mod parquet;
+pub mod stats;
 pub mod storage;
 
 /// Converts a Stream-producing future to a synchronous iterator.
14 changes: 8 additions & 6 deletions kernel/src/engine/default/parquet.rs
@@ -294,6 +294,7 @@ impl<E: TaskExecutor> ParquetHandler for DefaultParquetHandler<E> {
     /// - `location` - The full URL path where the Parquet file should be written
     ///   (e.g., `s3://bucket/path/file.parquet`, `file:///path/to/file.parquet`).
     /// - `data` - An iterator of engine data to be written to the Parquet file.
+    /// - `stats_columns` - Column names for which statistics should be collected.
     ///
     /// # Returns
     ///
@@ -302,6 +303,7 @@ impl<E: TaskExecutor> ParquetHandler for DefaultParquetHandler<E> {
         &self,
         location: url::Url,
         mut data: Box<dyn Iterator<Item = DeltaResult<Box<dyn EngineData>>> + Send>,
+        _stats_columns: &[String],
     ) -> DeltaResult<()> {
         let store = self.store.clone();
 
@@ -776,7 +778,7 @@ mod tests {
         // Test writing through the trait method
         let file_url = Url::parse("memory:///test/data.parquet").unwrap();
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter)
+            .write_parquet_file(file_url.clone(), data_iter, &[])
             .unwrap();
 
         // Verify we can read the file back
@@ -964,7 +966,7 @@ mod tests {
         // Write the data
         let file_url = Url::parse("memory:///roundtrip/test.parquet").unwrap();
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter)
+            .write_parquet_file(file_url.clone(), data_iter, &[])
             .unwrap();
 
         // Read it back
@@ -1152,7 +1154,7 @@ mod tests {
 
         // Write the first file
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter1)
+            .write_parquet_file(file_url.clone(), data_iter1, &[])
             .unwrap();
 
         // Create second data set with different data
@@ -1168,7 +1170,7 @@ mod tests {
 
         // Overwrite with second file (overwrite=true)
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter2)
+            .write_parquet_file(file_url.clone(), data_iter2, &[])
             .unwrap();
 
         // Read back and verify it contains the second data set
@@ -1231,7 +1233,7 @@ mod tests {
 
         // Write the first file
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter1)
+            .write_parquet_file(file_url.clone(), data_iter1, &[])
             .unwrap();
 
         // Create second data set
@@ -1247,7 +1249,7 @@ mod tests {
 
         // Write again - should overwrite successfully (new behavior always overwrites)
         parquet_handler
-            .write_parquet_file(file_url.clone(), data_iter2)
+            .write_parquet_file(file_url.clone(), data_iter2, &[])
             .unwrap();
 
         // Verify the file was overwritten with the new data
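At the call sites, the only visible change is the extra trailing argument. The snippet below is a hedged fragment (not a complete program) showing the updated call shape; `parquet_handler` and `data_iter` are assumed to be constructed as in the tests above, and the column names are purely illustrative. Note that the implementation currently binds the parameter as `_stats_columns`, so the values are accepted but not yet consumed, and the tests simply pass `&[]` to opt out.

```rust
// Fragment: assumes `parquet_handler` and `data_iter` are built as in the
// tests above; the column names here are hypothetical, for illustration only.
let stats_columns: Vec<String> = vec!["id".to_string(), "value".to_string()];
parquet_handler
    .write_parquet_file(
        Url::parse("memory:///example/data.parquet").unwrap(),
        data_iter,
        &stats_columns,
    )
    .unwrap();
```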