[WIP] support projection and partial reads in parquet client 3/n #101

Closed · wants to merge 11 commits
2 changes: 1 addition & 1 deletion acceptance/Cargo.toml
@@ -26,7 +26,7 @@ tar = "0.4"

[dev-dependencies]
arrow = { version = "^49.0", features = ["json", "prettyprint"] }
datatest-stable = "0.1.3"
datatest-stable = "0.2"
test-log = { version = "0.2", default-features = false, features = ["trace"] }
tempfile = "3"
test-case = { version = "3.1.0" }
35 changes: 26 additions & 9 deletions kernel/Cargo.toml
@@ -10,18 +10,13 @@ version.workspace = true

[dependencies]
arrow-array = { version = "^49.0" }
-arrow-arith = { version = "^49.0" }
-arrow-json = { version = "^49.0" }
-arrow-ord = { version = "^49.0" }
-arrow-schema = { version = "^49.0" }
arrow-select = { version = "^49.0" }
bytes = "1.4"
chrono = { version = "0.4", optional = true }
chrono = { version = "0.4" }
either = "1.8"
fix-hidden-lifetime-bug = "0.2"
itertools = "0.12"
lazy_static = "1.4"
-# need to generalize over arrow, arrow2 and diff parquet etc. (BYOP)
regex = "1.8"
roaring = "0.10.1"
serde = { version = "1", features = ["derive"] }
@@ -30,13 +25,18 @@ thiserror = "1"
# only for structured logging
tracing = "0.1"
url = "2"
uuid = "1.3.0"
uuid = { version = "1.3.0", features = ["v4"] }
z85 = "3.0.5"

# used for developer-visibility
visibility = "0.1.0"

# Used in default client
arrow-arith = { version = "^49.0", optional = true }
arrow-cast = { version = "^49.0", optional = true }
arrow-json = { version = "^49.0", optional = true }
arrow-ord = { version = "^49.0", optional = true }
arrow-schema = { version = "^49.0", optional = true }
futures = { version = "0.3", optional = true }
object_store = { version = "^0.8.0", optional = true }
parquet = { version = "^49.0", optional = true, features = [
@@ -46,15 +46,32 @@ parquet = { version = "^49.0", optional = true, features = [

# optionally used with default client (though not required)
tokio = { version = "1", optional = true, features = ["rt-multi-thread"] }
+arrow = { version = "^49.0", features = [
+"json",
+"prettyprint",
+"test_utils",
+], optional = true }
+rand = { version = "0.8", optional = true }

[features]
default = ["default-client"]
default-client = ["chrono", "futures", "object_store", "parquet"]
default-client = [
"arrow-arith",
"arrow-cast",
"arrow-json",
"arrow-ord",
"arrow-schema",
"futures",
"object_store",
"parquet",
]
developer-visibility = []
+test-utils = ["arrow", "default-client", "rand", "serde"]
+serde = []

[dev-dependencies]
arrow = { version = "^49.0", features = ["json", "prettyprint"] }
-deltakernel = { path = ".", features = ["tokio"] }
+deltakernel = { path = ".", features = ["tokio", "test-utils"] }
test-log = { version = "0.2", default-features = false, features = ["trace"] }
tempfile = "3"
test-case = { version = "3.1.0" }
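Note on the kernel/Cargo.toml change above: the arrow-arith, arrow-cast, arrow-json, arrow-ord, and arrow-schema crates become optional and are pulled in only by the default-client feature, so an engine that supplies its own parquet/arrow stack can depend on the kernel with default-features = false and avoid compiling arrow entirely. A minimal sketch of the gating this enables inside the crate (the exact module layout is illustrative, not taken from this diff):

    // lib.rs: the bundled client, and with it the optional arrow crates,
    // is compiled only when the `default-client` feature is enabled.
    #[cfg(feature = "default-client")]
    pub mod client;

    // Core types and log-replay logic remain available without it.
    pub mod actions;
    pub mod schema;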
17 changes: 8 additions & 9 deletions kernel/examples/inspect-table/src/main.rs
@@ -1,6 +1,7 @@
use deltakernel::client::executor::tokio::TokioBackgroundExecutor;
use deltakernel::client::DefaultTableClient;
use deltakernel::scan::ScanBuilder;
+use deltakernel::schema::StructType;
use deltakernel::{DeltaResult, Table};

use deltakernel::actions::{parse_actions, Action, ActionType};
@@ -10,7 +11,6 @@ use std::path::PathBuf;
use std::sync::Arc;

use arrow_array::RecordBatch;
-use arrow_schema::Schema as ArrowSchema;
use clap::{Parser, Subcommand};

#[derive(Parser)]
@@ -98,21 +98,20 @@ fn main() {
println!("{:#?}", files);
}
Commands::Actions { forward } => {
-let action_types = [
+let action_types = vec![
//ActionType::CommitInfo,
ActionType::Metadata,
ActionType::Protocol,
ActionType::Remove,
ActionType::Add,
];
-let read_schema = Arc::new(ArrowSchema {
-fields: action_types
-.as_ref()
+let read_schema = Arc::new(StructType::new(
+action_types
.iter()
-.map(|a| Arc::new(a.schema_field().try_into().unwrap()))
+.map(|a| a.schema_field())
+.cloned()
.collect(),
-metadata: Default::default(),
-});
+));

let batches = snapshot
._log_segment()
@@ -137,7 +136,7 @@ fn main() {
}
println!("-- at {:0>20} --", index);
let (batch, _) = batch.unwrap();
-let actions = parse_actions(&batch, action_types.as_ref()).unwrap();
+let actions = parse_actions(&batch, &action_types).unwrap();
for action in actions {
match action {
Action::Metadata(md) => println!("{:#?}", md),
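The inspect-table change above moves the example off arrow_schema::Schema and onto the kernel's own StructType, so the read schema is expressed in kernel types and schema projection stays inside the client. Condensed, the new flow looks like this (relying only on APIs visible in this diff, where ActionType::schema_field returns a reference to a kernel StructField):

    use std::sync::Arc;
    use deltakernel::actions::ActionType;
    use deltakernel::schema::StructType;

    let action_types = vec![ActionType::Metadata, ActionType::Protocol];
    // Build a kernel-native read schema from the per-action field definitions;
    // no conversion through an arrow Schema is needed anymore.
    let read_schema = Arc::new(StructType::new(
        action_types.iter().map(|a| a.schema_field()).cloned().collect(),
    ));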
28 changes: 17 additions & 11 deletions kernel/src/actions/mod.rs
@@ -7,6 +7,8 @@ use arrow_array::{
use either::Either;
use fix_hidden_lifetime_bug::fix_hidden_lifetime_bug;
use itertools::izip;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

use crate::{DeltaResult, Error};

@@ -37,7 +39,13 @@ pub enum ActionType {
}

#[derive(Debug, PartialEq, Eq, Clone)]
+#[cfg_attr(
+feature = "serde",
+derive(Serialize, Deserialize),
+serde(rename_all = "camelCase")
+)]
pub enum Action {
#[cfg_attr(feature = "serde", serde(rename = "metaData"))]
Metadata(Metadata),
Protocol(Protocol),
Add(Add),
@@ -508,8 +516,8 @@ fn cast_struct_column<T: 'static>(arr: &StructArray, name: impl AsRef<str>) -> D
}

fn struct_array_to_map(arr: &StructArray) -> DeltaResult<HashMap<String, Option<String>>> {
-let keys = cast_struct_column::<StringArray>(arr, "keys")?;
-let values = cast_struct_column::<StringArray>(arr, "values")?;
+let keys = cast_struct_column::<StringArray>(arr, "key")?;
+let values = cast_struct_column::<StringArray>(arr, "value")?;
Ok(keys
.into_iter()
.zip(values)
@@ -521,7 +529,7 @@
mod tests {
use std::sync::Arc;

-use arrow_schema::Schema as ArrowSchema;
+use arrow_array::ArrayRef;
use object_store::local::LocalFileSystem;

use super::*;
@@ -535,14 +543,13 @@ mod tests {
let store = Arc::new(LocalFileSystem::new());
let handler = DefaultJsonHandler::new(store, Arc::new(TokioBackgroundExecutor::new()));

-let json_strings: StringArray = vec![
+let json_strings: ArrayRef = Arc::new(StringArray::from(vec![
r#"{"add":{"path":"part-00000-fae5310a-a37d-4e51-827b-c3d5516560ca-c000.snappy.parquet","partitionValues":{},"size":635,"modificationTime":1677811178336,"dataChange":true,"stats":"{\"numRecords\":10,\"minValues\":{\"value\":0},\"maxValues\":{\"value\":9},\"nullCount\":{\"value\":0},\"tightBounds\":true}","tags":{"INSERTION_TIME":"1677811178336000","MIN_INSERTION_TIME":"1677811178336000","MAX_INSERTION_TIME":"1677811178336000","OPTIMIZE_TARGET_SIZE":"268435456"}}}"#,
r#"{"commitInfo":{"timestamp":1677811178585,"operation":"WRITE","operationParameters":{"mode":"ErrorIfExists","partitionBy":"[]"},"isolationLevel":"WriteSerializable","isBlindAppend":true,"operationMetrics":{"numFiles":"1","numOutputRows":"10","numOutputBytes":"635"},"engineInfo":"Databricks-Runtime/<unknown>","txnId":"a6a94671-55ef-450e-9546-b8465b9147de"}}"#,
r#"{"protocol":{"minReaderVersion":3,"minWriterVersion":7,"readerFeatures":["deletionVectors"],"writerFeatures":["deletionVectors"]}}"#,
r#"{"metaData":{"id":"testId","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"value\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":[],"configuration":{"delta.enableDeletionVectors":"true","delta.columnMapping.mode":"none"},"createdTime":1677811175819}}"#,
-]
-.into();
-let output_schema = Arc::new(ArrowSchema::try_from(log_schema()).unwrap());
+]));
+let output_schema = Arc::new(log_schema().clone());
handler.parse_json(json_strings, output_schema).unwrap()
}

@@ -598,16 +605,15 @@ mod tests {
let store = Arc::new(LocalFileSystem::new());
let handler = DefaultJsonHandler::new(store, Arc::new(TokioBackgroundExecutor::new()));

-let json_strings: StringArray = vec![
+let json_strings: ArrayRef = Arc::new(StringArray::from(vec![
r#"{"commitInfo":{"timestamp":1670892998177,"operation":"WRITE","operationParameters":{"mode":"Append","partitionBy":"[\"c1\",\"c2\"]"},"isolationLevel":"Serializable","isBlindAppend":true,"operationMetrics":{"numFiles":"3","numOutputRows":"3","numOutputBytes":"1356"},"engineInfo":"Apache-Spark/3.3.1 Delta-Lake/2.2.0","txnId":"046a258f-45e3-4657-b0bf-abfb0f76681c"}}"#,
r#"{"protocol":{"minReaderVersion":1,"minWriterVersion":2}}"#,
r#"{"metaData":{"id":"aff5cb91-8cd9-4195-aef9-446908507302","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"c1\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"c2\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"c3\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":["c1","c2"],"configuration":{},"createdTime":1670892997849}}"#,
r#"{"add":{"path":"c1=4/c2=c/part-00003-f525f459-34f9-46f5-82d6-d42121d883fd.c000.snappy.parquet","partitionValues":{"c1":"4","c2":"c"},"size":452,"modificationTime":1670892998135,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"c3\":5},\"maxValues\":{\"c3\":5},\"nullCount\":{\"c3\":0}}"}}"#,
r#"{"add":{"path":"c1=5/c2=b/part-00007-4e73fa3b-2c88-424a-8051-f8b54328ffdb.c000.snappy.parquet","partitionValues":{"c1":"5","c2":"b"},"size":452,"modificationTime":1670892998135,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"c3\":6},\"maxValues\":{\"c3\":6},\"nullCount\":{\"c3\":0}}"}}"#,
r#"{"add":{"path":"c1=6/c2=a/part-00011-10619b10-b691-4fd0-acc4-2a9608499d7c.c000.snappy.parquet","partitionValues":{"c1":"6","c2":"a"},"size":452,"modificationTime":1670892998135,"dataChange":true,"stats":"{\"numRecords\":1,\"minValues\":{\"c3\":4},\"maxValues\":{\"c3\":4},\"nullCount\":{\"c3\":0}}"}}"#,
-]
-.into();
-let output_schema = Arc::new(ArrowSchema::try_from(log_schema()).unwrap());
+]));
+let output_schema = Arc::new(log_schema().clone());
let batch = handler.parse_json(json_strings, output_schema).unwrap();

let actions = parse_action(&batch, &ActionType::Add)
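Two things worth noting in actions/mod.rs. First, behind the new serde feature, Action serializes under the same tags the Delta log uses: rename_all = "camelCase" plus the explicit "metaData" rename make the externally tagged enum match log lines such as {"metaData": {...}}. A small sketch of the round-trip this enables, assuming serde_json is available (it is not added by this diff):

    #[cfg(feature = "serde")]
    fn action_roundtrip(action: &Action) -> Action {
        // Externally tagged enum: {"add": {...}}, {"metaData": {...}}, etc.
        let json = serde_json::to_string(action).unwrap();
        serde_json::from_str(&json).unwrap()
    }

Second, struct_array_to_map now reads the map's child columns as "key"/"value" rather than "keys"/"values", matching the singular entry-field names conventional for Arrow/Parquet maps, which is presumably what the kernel's schema-to-arrow conversion emits after this change.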
46 changes: 23 additions & 23 deletions kernel/src/actions/schemas.rs
@@ -10,30 +10,30 @@ lazy_static! {
static ref METADATA_FIELD: StructField = StructField::new(
"metaData",
StructType::new(vec![
StructField::new("id", DataType::STRING, false),
StructField::new("id", DataType::STRING, true),
StructField::new("name", DataType::STRING, true),
StructField::new("description", DataType::STRING, true),
StructField::new(
"format",
StructType::new(vec![
StructField::new("provider", DataType::STRING, false),
StructField::new("provider", DataType::STRING, true),
StructField::new(
"configuration",
"options",
MapType::new(
DataType::STRING,
DataType::STRING,
true,
),
-true,
+false,
),
]),
false,
),
StructField::new("schemaString", DataType::STRING, false),
StructField::new("schemaString", DataType::STRING, true),
StructField::new(
"partitionColumns",
ArrayType::new(DataType::STRING, false),
-false,
+true,
),
StructField::new("createdTime", DataType::LONG, true),
StructField::new(
@@ -52,16 +52,16 @@ lazy_static! {
static ref PROTOCOL_FIELD: StructField = StructField::new(
"protocol",
StructType::new(vec![
StructField::new("minReaderVersion", DataType::INTEGER, false),
StructField::new("minWriterVersion", DataType::INTEGER, false),
StructField::new("minReaderVersion", DataType::INTEGER, true),
StructField::new("minWriterVersion", DataType::INTEGER, true),
StructField::new(
"readerFeatures",
-ArrayType::new(DataType::STRING, false),
+ArrayType::new(DataType::STRING, true),
true,
),
StructField::new(
"writerFeatures",
-ArrayType::new(DataType::STRING, false),
+ArrayType::new(DataType::STRING, true),
true,
),
]),
@@ -102,11 +102,11 @@ lazy_static! {
static ref ADD_FIELD: StructField = StructField::new(
"add",
StructType::new(vec![
StructField::new("path", DataType::STRING, false),
StructField::new("path", DataType::STRING, true),
partition_values_field(),
StructField::new("size", DataType::LONG, false),
StructField::new("modificationTime", DataType::LONG, false),
StructField::new("dataChange", DataType::BOOLEAN, false),
StructField::new("size", DataType::LONG, true),
StructField::new("modificationTime", DataType::LONG, true),
StructField::new("dataChange", DataType::BOOLEAN, true),
StructField::new("stats", DataType::STRING, true),
tags_field(),
deletion_vector_field(),
@@ -120,9 +120,9 @@ lazy_static! {
static ref REMOVE_FIELD: StructField = StructField::new(
"remove",
StructType::new(vec![
StructField::new("path", DataType::STRING, false),
StructField::new("path", DataType::STRING, true),
StructField::new("deletionTimestamp", DataType::LONG, true),
StructField::new("dataChange", DataType::BOOLEAN, false),
StructField::new("dataChange", DataType::BOOLEAN, true),
StructField::new("extendedFileMetadata", DataType::BOOLEAN, true),
partition_values_field(),
StructField::new("size", DataType::LONG, true),
@@ -177,7 +177,7 @@ lazy_static! {
DataType::STRING,
true,
),
-false,
+true,
),
StructField::new("removed", DataType::BOOLEAN, false),
]),
@@ -197,7 +197,7 @@ lazy_static! {
"sidecar",
StructType::new(vec![
StructField::new("path", DataType::STRING, false),
StructField::new("sizeInBytes", DataType::LONG, false),
StructField::new("sizeInBytes", DataType::LONG, true),
StructField::new("modificationTime", DataType::LONG, false),
StructField::new("type", DataType::STRING, false),
tags_field(),
@@ -231,19 +231,19 @@ fn partition_values_field() {
StructField::new(
"partitionValues",
MapType::new(DataType::STRING, DataType::STRING, true),
-false,
+true,
)
}

fn deletion_vector_field() -> StructField {
StructField::new(
"deletionVector",
DataType::Struct(Box::new(StructType::new(vec![
StructField::new("storageType", DataType::STRING, false),
StructField::new("pathOrInlineDv", DataType::STRING, false),
StructField::new("storageType", DataType::STRING, true),
StructField::new("pathOrInlineDv", DataType::STRING, true),
StructField::new("offset", DataType::INTEGER, true),
StructField::new("sizeInBytes", DataType::INTEGER, false),
StructField::new("cardinality", DataType::LONG, false),
StructField::new("sizeInBytes", DataType::INTEGER, true),
StructField::new("cardinality", DataType::LONG, true),
]))),
true,
)
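Most leaf fields in these action schemas flip from non-nullable to nullable, which is what the projection and partial-read support in this PR needs: on any given log row only one top-level action column (add, remove, metaData, ...) is populated and the rest are null, and a reader projecting a subset of columns may likewise materialize absent data as null. Requiredness is therefore enforced when an action is parsed rather than by the read schema. A schematic illustration of such a parse-time check (this helper is hypothetical, not part of the diff):

    // Extract a value from a nullable column, failing only when the field is
    // genuinely missing for the action actually present on this row.
    fn require<T>(value: Option<T>, field: &str) -> Result<T, String> {
        value.ok_or_else(|| format!("missing required field: {field}"))
    }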