Update to Arrow 35.0.0 #5441

Closed
wants to merge 8 commits
8 changes: 4 additions & 4 deletions Cargo.toml
@@ -45,10 +45,10 @@ repository = "https://github.com/apache/arrow-datafusion"
rust-version = "1.64"

[workspace.dependencies]
arrow = { version = "34.0.0", features = ["prettyprint"] }
arrow-buffer = "34.0.0"
arrow-schema = "34.0.0"
parquet = { version = "34.0.0", features = ["arrow", "async"] }
arrow = { version = "35.0.0", features = ["prettyprint"] }
arrow-buffer = "35.0.0"
arrow-schema = "35.0.0"
parquet = { version = "35.0.0", features = ["arrow", "async"] }

[profile.release]
codegen-units = 1
62 changes: 31 additions & 31 deletions datafusion-cli/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion datafusion-cli/Cargo.toml
@@ -29,7 +29,7 @@ rust-version = "1.62"
readme = "README.md"

[dependencies]
arrow = "34.0.0"
arrow = "35.0.0"
async-trait = "0.1.41"
clap = { version = "3", features = ["derive", "cargo"] }
datafusion = { path = "../datafusion/core", version = "20.0.0" }
2 changes: 1 addition & 1 deletion datafusion-examples/Cargo.toml
@@ -36,7 +36,7 @@ required-features = ["datafusion/avro"]

[dev-dependencies]
arrow = { workspace = true }
arrow-flight = { version = "34.0.0", features = ["flight-sql-experimental"] }
arrow-flight = { version = "35.0.0", features = ["flight-sql-experimental"] }
arrow-schema = { workspace = true }
async-trait = "0.1.41"
dashmap = "5.4"
4 changes: 0 additions & 4 deletions datafusion/common/src/scalar.rs
@@ -3070,10 +3070,6 @@ mod tests {
ScalarValue::Decimal128(None, 10, 2),
ScalarValue::try_from_array(&array, 3).unwrap()
);
assert_eq!(
ScalarValue::Decimal128(None, 10, 2),
ScalarValue::try_from_array(&array, 4).unwrap()
);

Review comment from @tustvold (Contributor Author), Mar 1, 2023:
This test was wrong: index 4 is beyond the bounds of the array. Previously this was fine because the null buffer only enforced the length for multiples of 8.

Ok(())
}
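A minimal sketch (not from the PR) of the bounds issue @tustvold describes above, assuming a four-element Decimal128 array like the test's; the construction below is illustrative, since the diff only shows the assertions:

```rust
use arrow::array::{Array, Decimal128Array};

#[test]
fn index_4_is_out_of_bounds() {
    // Hypothetical construction: four values, so valid indices are 0..=3.
    let array: Decimal128Array = vec![Some(1_i128), None, Some(3), None].into_iter().collect();
    assert_eq!(array.len(), 4);
    // The validity bitmap is still padded to a whole byte (8 bits), which is
    // why reading slot 4 went unnoticed before arrow 35 tightened the length
    // check on the null buffer.
}
```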
8 changes: 4 additions & 4 deletions datafusion/core/tests/dataframe_functions.rs
@@ -398,10 +398,10 @@ async fn test_fn_regexp_match() -> Result<()> {
"+-----------------------------------+",
"| regexpmatch(test.a,Utf8(\"[a-z]\")) |",
"+-----------------------------------+",
"| [] |",
"| [] |",
"| [] |",
"| [] |",
"| [a] |",
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🎉

"| [a] |",
"| [d] |",
"| [b] |",
"+-----------------------------------+",
];

32 changes: 16 additions & 16 deletions datafusion/core/tests/sql/expr.rs
@@ -1279,36 +1279,36 @@ async fn test_extract_date_part() -> Result<()> {
"12.0"
);
test_expression!(
"EXTRACT(second FROM to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12.12345678"
"EXTRACT(second FROM to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12.123456789"
);
test_expression!(
"EXTRACT(millisecond FROM to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12123.45678"
"EXTRACT(millisecond FROM to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12123.456789"
);
test_expression!(
"EXTRACT(microsecond FROM to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12123456.78"
"EXTRACT(microsecond FROM to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12123456.789"
);
test_expression!(
"EXTRACT(nanosecond FROM to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"1.212345678e10"
"EXTRACT(nanosecond FROM to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"1.2123456789e10"
);
test_expression!(
"date_part('second', to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12.12345678"
"date_part('second', to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12.123456789"
);
test_expression!(
"date_part('millisecond', to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12123.45678"
"date_part('millisecond', to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12123.456789"
);
test_expression!(
"date_part('microsecond', to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"12123456.78"
"date_part('microsecond', to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"12123456.789"
);
test_expression!(
"date_part('nanosecond', to_timestamp('2020-09-08T12:00:12.12345678+00:00'))",
"1.212345678e10"
"date_part('nanosecond', to_timestamp('2020-09-08T12:00:12.123456789+00:00'))",
"1.2123456789e10"
);
Ok(())
}
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/set_variable.rs
@@ -414,7 +414,7 @@ async fn set_time_zone_bad_time_zone_format() {
.await
.unwrap();
let err = pretty_format_batches(&result).err().unwrap().to_string();
assert_eq!(err, "Parser error: Invalid timezone \"+08:00:00\": Expected format [+-]XX:XX, [+-]XX, or [+-]XXXX");
assert_eq!(err, "Parser error: Invalid timezone \"+08:00:00\": only offset based timezones supported without chrono-tz feature");
Review comment from a Contributor:
Perhaps we can improve the error message a bit in the future 😊

plan_and_collect(&ctx, "SET TIME ZONE = '08:00'")
.await
12 changes: 6 additions & 6 deletions datafusion/core/tests/sql/timestamp.rs
@@ -1411,14 +1411,14 @@ async fn cast_timestamp_before_1970() -> Result<()> {

assert_batches_eq!(expected, &actual);

let sql = "select cast('1969-01-01T00:00:00.1Z' as timestamp);";
let sql = "select cast('1969-01-01T00:00:00.100Z' as timestamp);";
Review comment from a Contributor:
what happens without this change?

Review comment from @tustvold (Contributor Author), Mar 14, 2023:
An error. It was a misreading of the RFC spec: I thought it restricted subsecond precision to multiples of 3 digits. I'm working on a fix. It is somewhat amusing that chrono doesn't actually have any tests for this...

Review comment from @alamb (Contributor), Mar 14, 2023:
Filed upstream issue: apache/arrow-rs#3859 (I think apache/arrow-rs#3858 fixes it)

Review comment from a Contributor:
I would tend to think this regression should block the upgrade of datafusion, but would defer to others. Maybe we can mark this PR as "ready for review" to get some other opinions 🤔

let actual = execute_to_batches(&ctx, sql).await;
let expected = vec![
"+--------------------------------+",
"| Utf8(\"1969-01-01T00:00:00.1Z\") |",
"+--------------------------------+",
"| 1969-01-01T00:00:00.100 |",
"+--------------------------------+",
"+----------------------------------+",
"| Utf8(\"1969-01-01T00:00:00.100Z\") |",
"+----------------------------------+",
"| 1969-01-01T00:00:00.100 |",
"+----------------------------------+",
];

assert_batches_eq!(expected, &actual);
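A hedged sketch of the regression discussed in the review thread above, using arrow's `string_to_timestamp_nanos` parser; the failure on the short fraction is as reported by the reviewers, not re-verified here:

```rust
use arrow::compute::kernels::cast_utils::string_to_timestamp_nanos;

fn main() {
    // Three fractional digits parse fine under arrow 35.0.0.
    assert!(string_to_timestamp_nanos("1969-01-01T00:00:00.100Z").is_ok());
    // Per the thread above, a single fractional digit was rejected by 35.0.0,
    // the regression tracked in apache/arrow-rs#3859 (fix proposed in #3858).
    let _short_fraction = string_to_timestamp_nanos("1969-01-01T00:00:00.1Z");
}
```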
@@ -431,7 +431,7 @@ mod tests {
.project(proj)?
.build()?;

let expected = "Error parsing 'I'M NOT A TIMESTAMP' as timestamp";
let expected = "Error parsing timestamp from 'I'M NOT A TIMESTAMP'";
Review comment from a Contributor:
👍

let actual = get_optimized_plan_err(&plan, &Utc::now());
assert_contains!(actual, expected);
Ok(())
8 changes: 5 additions & 3 deletions datafusion/physical-expr/src/expressions/in_list.rs
@@ -162,9 +162,11 @@
}
};

match data.null_buffer() {
Some(buffer) => BitIndexIterator::new(buffer.as_ref(), data.offset(), data.len())
.for_each(insert_value),
match data.nulls() {
Some(nulls) => {
BitIndexIterator::new(nulls.validity(), nulls.offset(), nulls.len())
.for_each(insert_value)
}
None => (0..data.len()).for_each(insert_value),
}

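A minimal sketch of the migration applied above, assuming arrow 35's `ArrayData::nulls()` returning `Option<&NullBuffer>`; the helper name and closure are illustrative, and it checks validity slot by slot rather than using the PR's faster `BitIndexIterator`:

```rust
use arrow::array::ArrayData;

// Illustrative helper: call `f` for every non-null slot in `data`.
fn for_each_valid_index(data: &ArrayData, mut f: impl FnMut(usize)) {
    match data.nulls() {
        // The offset and length now come from the NullBuffer itself rather
        // than from the surrounding ArrayData.
        Some(nulls) => (0..nulls.len()).filter(|&i| nulls.is_valid(i)).for_each(&mut f),
        None => (0..data.len()).for_each(&mut f),
    }
}
```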
5 changes: 1 addition & 4 deletions datafusion/physical-expr/src/regex_expressions.rs
@@ -265,10 +265,7 @@ fn _regexp_replace_static_pattern_replace<T: OffsetSizeTrait>(
let data = ArrayData::try_new(
GenericStringArray::<T>::DATA_TYPE,
string_array.len(),
string_array
.data_ref()
.null_buffer()
.map(|b| b.bit_slice(string_array.offset(), string_array.len())),
string_array.data().nulls().map(|b| b.inner().sliced()),
0,
vec![new_offsets.finish(), vals.finish()],
vec![],
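A hedged sketch of the other half of the same migration (API names as they appear in the diff; the helper itself is illustrative): `NullBuffer::inner()` exposes the underlying boolean buffer, and `sliced()` yields a byte `Buffer` aligned to the array's offset and length, replacing the old `null_buffer().map(|b| b.bit_slice(offset, len))` chain.

```rust
use arrow::array::{Array, StringArray};
use arrow::buffer::Buffer;

// Illustrative helper: the null bitmap of `input`, sliced so it can be
// passed as the nulls argument of ArrayData::try_new for an array of the
// same length.
fn sliced_null_bitmap(input: &StringArray) -> Option<Buffer> {
    input.data().nulls().map(|n| n.inner().sliced())
}
```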
2 changes: 1 addition & 1 deletion datafusion/proto/Cargo.toml
@@ -58,4 +58,4 @@ tokio = "1.18"
[build-dependencies]
# Pin these dependencies so that the generated output is deterministic
pbjson-build = { version = "=0.5.1" }
prost-build = { version = "=0.11.7" }
prost-build = { version = "=0.11.8" }