Skip to content

Commit e1e5483

Browse files
chore: Bump arrow-rs to 53.1.0 and datafusion (apache#1001)
## Which issue does this PR close?

## Rationale for this change

Arrow-rs 53.1.0 includes performance improvements.

## What changes are included in this PR?

Bumping arrow-rs to 53.1.0 and datafusion to a revision.

## How are these changes tested?

Existing tests.
1 parent ff41f1b commit e1e5483

File tree

1 file changed

+7
-7
lines changed

1 file changed

+7
-7
lines changed

src/scalar_funcs/hash_expressions.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ use arrow_array::{ArrayRef, Int32Array, Int64Array, StringArray};
2222
use datafusion::functions::crypto::{sha224, sha256, sha384, sha512};
2323
use datafusion_common::cast::as_binary_array;
2424
use datafusion_common::{exec_err, internal_err, DataFusionError, ScalarValue};
25-
use datafusion_expr::{ColumnarValue, ScalarFunctionImplementation};
25+
use datafusion_expr::{ColumnarValue, ScalarUDF};
2626
use std::sync::Arc;
2727

2828
/// Spark compatible murmur3 hash (just `hash` in Spark) in vectorized execution fashion
@@ -115,31 +115,31 @@ pub fn spark_xxhash64(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusio
115115

116116
/// `sha224` function that simulates Spark's `sha2` expression with bit width 224
117117
pub fn spark_sha224(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
118-
wrap_digest_result_as_hex_string(args, sha224().fun())
118+
wrap_digest_result_as_hex_string(args, sha224())
119119
}
120120

121121
/// `sha256` function that simulates Spark's `sha2` expression with bit width 0 or 256
122122
pub fn spark_sha256(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
123-
wrap_digest_result_as_hex_string(args, sha256().fun())
123+
wrap_digest_result_as_hex_string(args, sha256())
124124
}
125125

126126
/// `sha384` function that simulates Spark's `sha2` expression with bit width 384
127127
pub fn spark_sha384(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
128-
wrap_digest_result_as_hex_string(args, sha384().fun())
128+
wrap_digest_result_as_hex_string(args, sha384())
129129
}
130130

131131
/// `sha512` function that simulates Spark's `sha2` expression with bit width 512
132132
pub fn spark_sha512(args: &[ColumnarValue]) -> Result<ColumnarValue, DataFusionError> {
133-
wrap_digest_result_as_hex_string(args, sha512().fun())
133+
wrap_digest_result_as_hex_string(args, sha512())
134134
}
135135

136136
// Spark requires hex string as the result of sha2 functions, we have to wrap the
137137
// result of digest functions as hex string
138138
fn wrap_digest_result_as_hex_string(
139139
args: &[ColumnarValue],
140-
digest: ScalarFunctionImplementation,
140+
digest: Arc<ScalarUDF>,
141141
) -> Result<ColumnarValue, DataFusionError> {
142-
let value = digest(args)?;
142+
let value = digest.invoke(args)?;
143143
match value {
144144
ColumnarValue::Array(array) => {
145145
let binary_array = as_binary_array(&array)?;

0 commit comments

Comments (0)