Commit e0c211a

Hack on it until the tests compile
1 parent 3690ed6 commit e0c211a

6 files changed: +16 -23 lines changed

Diff for: kernel/src/arrow.rs

+1 -5

@@ -1,14 +1,10 @@
 //! This module exists to help re-export the version of arrow used by default-engine and other
 //! parts of kernel that need arrow
 
-
 #[cfg(all(feature = "arrow_55"))]
 pub use arrow_55::*;
 
 // if nothing is enabled but we need arrow because of some other feature flag, default to lowest
 // supported version
-#[cfg(all(
-    feature = "need_arrow",
-    not(feature = "arrow_55")
-))]
+#[cfg(all(feature = "need_arrow", not(feature = "arrow_55")))]
 compile_error!("Requested a feature that needs arrow without enabling arrow. Please enable the `arrow_53` or `arrow_54` feature");
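
The collapsed `#[cfg(...)]` is equivalent to the removed multi-line form; the guard turns "an arrow-dependent feature was requested but no arrow version feature is enabled" into a single readable build error. The point of the re-export itself is that downstream code can name arrow types through the kernel instead of pinning its own arrow major version. A minimal sketch of that usage, assuming the crate is consumed as `delta_kernel` with the `arrow_55` feature enabled:

// Hypothetical downstream usage; the crate name `delta_kernel` and the
// enabled `arrow_55` feature are assumptions, not shown in this commit.
use delta_kernel::arrow::array::Int32Array;

fn make_column() -> Int32Array {
    // arrow types are reachable through the kernel's re-export module
    Int32Array::from(vec![1, 2, 3])
}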

Diff for: kernel/src/engine/default/filesystem.rs

+3 -1

@@ -69,7 +69,7 @@ impl<E: TaskExecutor> FileSystemClient for ObjectStoreFileSystemClient<E> {
                     .send(Ok(FileMeta {
                         location,
                         last_modified: meta.last_modified.timestamp_millis(),
-                        size: meta.size,
+                        size: meta.size as usize,
                     }))
                     .ok();
             }
@@ -129,6 +129,8 @@
             }
             _ => {
                 if let Some(rng) = range {
+                    // convert to u64 for new API
+                    let rng = rng.start as u64..rng.end as u64;
                     Ok(store.get_range(&path, rng).await?)
                 } else {
                     let result = store.get(&path).await?;
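
Both hunks bridge the kernel's `usize`-based types to a newer `object_store` API in which, as the casts suggest, `ObjectMeta::size` is a `u64` and `get_range` takes a `Range<u64>`. A small sketch of the two conversions under that assumption:

use std::ops::Range;

// Sketch of the conversions performed above, assuming an object_store
// release that reports sizes and byte ranges as u64 while the kernel's
// FileMeta still stores usize.
fn to_store_range(rng: Range<usize>) -> Range<u64> {
    // widening cast for the store API; always lossless
    rng.start as u64..rng.end as u64
}

fn to_kernel_size(size: u64) -> usize {
    // `as` silently truncates on 32-bit targets for objects over 4 GiB;
    // `usize::try_from(size)` would surface that case as an error instead.
    size as usize
}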

Diff for: kernel/src/engine/default/json.rs

+3 -3

@@ -446,7 +446,7 @@ mod tests {
             &self,
             prefix: Option<&Path>,
             offset: &Path,
-        ) -> BoxStream<'_, Result<ObjectMeta>> {
+        ) -> BoxStream<'static, Result<ObjectMeta>> {
             self.inner.list_with_offset(prefix, offset)
         }
 
@@ -539,7 +539,7 @@
         let files = &[FileMeta {
             location: url.clone(),
             last_modified: meta.last_modified.timestamp_millis(),
-            size: meta.size,
+            size: meta.size as usize,
         }];
 
         let handler = DefaultJsonHandler::new(store, Arc::new(TokioBackgroundExecutor::new()));
@@ -688,7 +688,7 @@
                 FileMeta {
                     location: url,
                     last_modified: meta.last_modified.timestamp_millis(),
-                    size: meta.size,
+                    size: meta.size as usize,
                 }
             }
         })
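
The signature change mirrors the newer `object_store` trait, where `list_with_offset` returns an owned `BoxStream<'static, _>` rather than a stream borrowing from `&self`, so a test wrapper that only delegates must declare the same lifetime. A sketch of that delegation under the assumed trait shape:

use futures::stream::BoxStream;
use object_store::{path::Path, ObjectMeta, ObjectStore, Result};

// Sketch, assuming the newer object_store trait: the inner store already
// hands back a stream that owns what it needs, so forwarding it satisfies
// the 'static bound without borrowing from the wrapper.
fn forward_list_with_offset(
    inner: &dyn ObjectStore,
    prefix: Option<&Path>,
    offset: &Path,
) -> BoxStream<'static, Result<ObjectMeta>> {
    inner.list_with_offset(prefix, offset)
}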

Diff for: kernel/src/engine/default/parquet.rs

+8 -6

@@ -255,8 +255,8 @@ impl FileOpener for ParquetOpener {
         Ok(Box::pin(async move {
             // TODO avoid IO by converting passed file meta to ObjectMeta
             let meta = store.head(&path).await?;
-            let mut reader = ParquetObjectReader::new(store, meta.location)
-                .with_file_size(meta.size);
+            let mut reader =
+                ParquetObjectReader::new(store, meta.location).with_file_size(meta.size);
             let metadata = ArrowReaderMetadata::load_async(&mut reader, Default::default()).await?;
             let parquet_schema = metadata.schema();
             let (indices, requested_ordering) =
@@ -393,7 +393,8 @@ mod tests {
         let location = Path::from(url.path());
         let meta = store.head(&location).await.unwrap();
 
-        let reader = ParquetObjectReader::new(store.clone(), meta.clone());
+        let reader =
+            ParquetObjectReader::new(store.clone(), meta.location).with_file_size(meta.size);
         let physical_schema = ParquetRecordBatchStreamBuilder::new(reader)
             .await
             .unwrap()
@@ -403,7 +404,7 @@
         let files = &[FileMeta {
             location: url.clone(),
             last_modified: meta.last_modified.timestamp(),
-            size: meta.size,
+            size: meta.size as usize,
         }];
 
         let handler = DefaultParquetHandler::new(store, Arc::new(TokioBackgroundExecutor::new()));
@@ -511,13 +512,14 @@
 
         let filename = location.path().split('/').last().unwrap();
         assert_eq!(&expected_location.join(filename).unwrap(), location);
-        assert_eq!(expected_size, size);
+        assert_eq!(expected_size, size as u64);
         assert!(now - last_modified < 10_000);
 
         // check we can read back
         let path = Path::from(location.path());
         let meta = store.head(&path).await.unwrap();
-        let reader = ParquetObjectReader::new(store.clone(), meta.clone());
+        let reader =
+            ParquetObjectReader::new(store.clone(), meta.location).with_file_size(meta.size);
         let physical_schema = ParquetRecordBatchStreamBuilder::new(reader)
             .await
             .unwrap()
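
The construction pattern here follows the newer parquet reader API suggested by the diff: `ParquetObjectReader::new` takes the store and the object path, and the file size is attached separately with `with_file_size`, so a caller that already knows the size can skip an extra `head` round trip. A sketch under those assumed signatures:

use std::sync::Arc;

use object_store::{path::Path, ObjectStore};
use parquet::arrow::async_reader::ParquetObjectReader;

// Sketch of the (store, path) + with_file_size pattern, assuming the
// parquet 55 API. Here the size still comes from a `head` call; a caller
// holding a FileMeta with a known size could pass that instead.
async fn open_reader(
    store: Arc<dyn ObjectStore>,
    path: Path,
) -> object_store::Result<ParquetObjectReader> {
    let meta = store.head(&path).await?;
    Ok(ParquetObjectReader::new(store, meta.location).with_file_size(meta.size))
}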

Diff for: kernel/src/engine/default/storage.rs

-4

@@ -43,7 +43,3 @@
     */
     todo!("Need to update hdfs object store");
 }
-
-
-
-

Diff for: kernel/src/parquet.rs

+1 -4

@@ -6,8 +6,5 @@ pub use parquet_55::*;
 
 // if nothing is enabled but we need arrow because of some other feature flag, default to lowest
 // supported version
-#[cfg(all(
-    feature = "need_arrow",
-    not(feature = "arrow_55"),
-))]
+#[cfg(all(feature = "need_arrow", not(feature = "arrow_55"),))]
 compile_error!("Requested a feature that needs arrow without enabling arrow. Please enable the `arrow_53` or `arrow_54` feature");
