Skip to content

Commit

Permalink
build(deps): bump datafusion from 06e9f53 to 90775b4 (#981)
Browse files Browse the repository at this point in the history
Bumps [datafusion](https://github.com/apache/arrow-datafusion) from `06e9f53` to `90775b4`.
- [Commits](apache/datafusion@06e9f53...90775b4)

- Instead of `From<Vec<(Field, Arc<dyn Array>)>>`, it is now `From<Vec<(Arc<Field>, Arc<dyn Array>)>>`, which essentially avoids `Field::clone`.
- There is no `csv::Reader::new` anymore; `csv::ReaderBuilder` must be used instead.
- `StreamReader::new` now creates a buffered reader by default.

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
  • Loading branch information
vrongmeal and dependabot[bot] authored May 13, 2023
1 parent 68d7293 commit 1f9b55e
Show file tree
Hide file tree
Showing 4 changed files with 73 additions and 72 deletions.
96 changes: 48 additions & 48 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

15 changes: 4 additions & 11 deletions crates/datasource_mongodb/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -117,26 +117,19 @@ impl ArrayBuilder for RecordStructBuilder {
let builders = std::mem::take(&mut self.builders);
let arrays = builders.into_iter().map(|mut b| b.finish());

let pairs: Vec<(Field, Arc<dyn Array>)> = fields
.into_iter()
.map(|f| f.as_ref().clone())
.zip(arrays)
.collect();
let pairs: Vec<(Arc<Field>, Arc<dyn Array>)> =
fields.into_iter().map(Arc::clone).zip(arrays).collect();

let array: StructArray = pairs.into();

Arc::new(array)
}

fn finish_cloned(&self) -> ArrayRef {
let fields = self.fields.clone();
let arrays: Vec<Arc<dyn Array>> = self.builders.iter().map(|b| b.finish_cloned()).collect();

let pairs: Vec<(Field, Arc<dyn Array>)> = fields
.into_iter()
.map(|f| f.as_ref().clone())
.zip(arrays)
.collect();
let pairs: Vec<(Arc<Field>, Arc<dyn Array>)> =
self.fields.iter().map(Arc::clone).zip(arrays).collect();

let array: StructArray = pairs.into();

Expand Down
24 changes: 13 additions & 11 deletions crates/datasource_object_store/src/csv/csv_helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,19 @@ pub struct CsvConfig {

impl CsvConfig {
fn open<R: std::io::Read>(&self, reader: R, first_chunk: bool) -> csv::Reader<R> {
let datetime_format = None;
csv::Reader::new(
reader,
Arc::clone(&self.file_schema),
self.has_header && first_chunk,
Some(self.delimiter),
self.batch_size,
None,
self.file_projection.clone(),
datetime_format,
)
let mut builder = csv::ReaderBuilder::new(self.file_schema.clone())
.has_header(self.has_header && first_chunk)
.with_delimiter(self.delimiter)
.with_batch_size(self.batch_size);

if let Some(projection) = &self.file_projection {
builder = builder.with_projection(projection.clone());
}

// NB: This function never errors. If it ever does (in future after DF
// changes), this is a programming error and panic-ing is the correct
// thing to do here.
builder.build(reader).expect("should be a valid csv reader")
}
}

Expand Down
10 changes: 8 additions & 2 deletions crates/snowflake_connector/src/query.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,10 @@
use std::{collections::HashMap, fmt::Debug, io::Cursor, sync::Arc, vec};
use std::{
collections::HashMap,
fmt::Debug,
io::{BufReader, Cursor},
sync::Arc,
vec,
};

use datafusion::{
arrow::{
Expand Down Expand Up @@ -236,7 +242,7 @@ macro_rules! make_json_column {

pub enum RecordBatchIter {
Stream {
reader: StreamReader<Cursor<Vec<u8>>>,
reader: StreamReader<BufReader<Cursor<Vec<u8>>>>,
schema: SchemaRef,
type_metas: Arc<Vec<SnowflakeTypeMeta>>,
},
Expand Down

0 comments on commit 1f9b55e

Please sign in to comment.