Commit 148a5ab

Adress clippies
1 parent e85bca6 commit 148a5ab

8 files changed, +43 -54 lines changed

datafusion/src/avro_to_arrow/arrow_array_reader.rs

Lines changed: 17 additions & 25 deletions
@@ -124,11 +124,7 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
                 .collect()
         };
         let projected_schema = Arc::new(Schema::new(projected_fields));
-        arrays.and_then(|arr| {
-            RecordBatch::try_new(projected_schema, arr)
-                .map(Some)
-                .map_err(|e| e.into())
-        })
+        arrays.and_then(|arr| RecordBatch::try_new(projected_schema, arr).map(Some))
     }
 
     fn build_boolean_array(
@@ -291,28 +287,25 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
         } else if let Value::Array(n) = value {
             n.into_iter()
                 .map(|v| {
-                    if let Some(v) = resolve_string(&v) {
-                        Some(v)
-                    } else if matches!(
-                        v,
-                        Value::Array(_) | Value::Record(_) | Value::Null
-                    ) {
-                        // implicitly drop nested values
-                        // TODO support deep-nesting
-                        None
-                    } else {
-                        None
-                    }
+                    resolve_string(&v)
+                    // else if matches!(
+                    //     v,
+                    //     Value::Array(_) | Value::Record(_) | Value::Null
+                    // ) {
+                    //     // implicitly drop nested values
+                    //     // TODO support deep-nesting
+                    //     None
+                    // }
                 })
                 .collect()
         } else if let Value::Null = value {
             vec![None]
         } else if !matches!(value, Value::Record(_)) {
             vec![resolve_string(&value)]
         } else {
-            return Err(SchemaError(format!(
-                "Only scalars are currently supported in Avro arrays",
-            )));
+            return Err(SchemaError(
+                "Only scalars are currently supported in Avro arrays".to_string(),
+            ));
         };
 
         // TODO: ARROW-10335: APIs of dictionary arrays and others are different. Unify
@@ -724,7 +717,7 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
             rows.iter()
                 .map(|row| {
                     let maybe_value = self.field_lookup(field.name(), row);
-                    maybe_value.and_then(|value| resolve_bytes(value))
+                    maybe_value.and_then(resolve_bytes)
                })
                 .collect::<BinaryArray>(),
         )
@@ -915,7 +908,7 @@ fn resolve_bytes(v: Value) -> Option<Vec<u8>> {
     }
     .ok()
     .and_then(|v| match v {
-        Value::Bytes(s) => Some(s.clone()),
+        Value::Bytes(s) => Some(s),
         _ => None,
     })
 }
@@ -948,11 +941,10 @@
     fn resolve(value: &Value) -> Option<Self::Native> {
         let value = if SchemaKind::from(value) == SchemaKind::Union {
            // Pull out the Union, and attempt to resolve against it.
-            let v = match value {
+            match value {
                 Value::Union(b) => b,
                 _ => unreachable!(),
-            };
-            v
+            }
         } else {
             value
         };
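
The hunks in this file appear to address several routine clippy findings: a closure that only forwards its argument (`clippy::redundant_closure`), a `format!` call with no arguments (`clippy::useless_format`), a clone of a value that is already owned, a binding returned on the very next line (`clippy::let_and_return`), and a `.map_err(|e| e.into())` that was not needed. A minimal, self-contained sketch of those patterns (illustrative only, with a hypothetical `resolve_len` helper, not the DataFusion code itself):

```rust
fn resolve_len(v: String) -> Option<usize> {
    Some(v.len())
}

fn main() {
    // clippy::redundant_closure: `|v| resolve_len(v)` can be the function itself.
    let maybe_value = Some(String::from("abc"));
    let _len = maybe_value.and_then(resolve_len);

    // clippy::useless_format: `format!` without arguments only allocates;
    // `.to_string()` expresses the same thing directly.
    let _msg: String = "Only scalars are currently supported".to_string();

    // clippy::let_and_return: binding a value just to return it on the next
    // line can be replaced by the expression itself.
    let kind = match Some(1) {
        Some(n) => n,
        None => 0,
    };
    assert_eq!(kind, 1);
}
```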

datafusion/src/avro_to_arrow/mod.rs

Lines changed: 7 additions & 7 deletions
@@ -97,7 +97,7 @@ fn schema_to_field_with_props(
             .iter()
             .find(|&schema| !matches!(schema, AvroSchema::Null))
         {
-            schema_to_field_with_props(&schema, None, has_nullable, None)?
+            schema_to_field_with_props(schema, None, has_nullable, None)?
                 .data_type()
                 .clone()
         } else {
@@ -107,15 +107,15 @@
             }
         } else {
             let fields = sub_schemas
-                .into_iter()
-                .map(|s| schema_to_field_with_props(&s, None, has_nullable, None))
+                .iter()
+                .map(|s| schema_to_field_with_props(s, None, has_nullable, None))
                 .collect::<Result<Vec<Field>>>()?;
             DataType::Union(fields)
         }
     }
     AvroSchema::Record { name, fields, .. } => {
         let fields: Result<Vec<Field>> = fields
-            .into_iter()
+            .iter()
             .map(|field| {
                 let mut props = BTreeMap::new();
                 if let Some(doc) = &field.doc {
@@ -278,7 +278,7 @@ fn external_props(schema: &AvroSchema) -> BTreeMap<String, String> {
             ..
         } => {
             let aliases: Vec<String> = aliases
-                .into_iter()
+                .iter()
                 .map(|alias| aliased(alias, namespace.as_deref(), None))
                 .collect();
             props.insert("aliases".to_string(), format!("[{}]", aliases.join(",")));
@@ -295,7 +295,7 @@ fn get_metadata(
 ) -> BTreeMap<String, String> {
     let mut metadata: BTreeMap<String, String> = Default::default();
     metadata.extend(props);
-    return metadata;
+    metadata
 }
 
 /// Returns the fully qualified name for a field
@@ -307,7 +307,7 @@ pub fn aliased(
     if name.contains('.') {
         name.to_string()
     } else {
-        let namespace = namespace.as_ref().map(|s| s.as_ref()).or(default_namespace);
+        let namespace = namespace.as_ref().copied().or(default_namespace);
 
         match namespace {
             Some(ref namespace) => format!("{}.{}", namespace, name),
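
These edits look like the usual trio of `clippy::into_iter_on_ref` (calling `.into_iter()` on a shared reference just borrows, so `.iter()` says what happens), `clippy::needless_return` (a trailing expression is returned implicitly), and a shorter way to copy an `Option<&str>` out of a reference. A rough sketch of the patterns, using simplified stand-in functions rather than the real schema conversion code:

```rust
use std::collections::BTreeMap;

// clippy::into_iter_on_ref: on `&[String]`, `into_iter()` yields references anyway,
// so `iter()` states the intent explicitly.
fn name_lengths(names: &[String]) -> Vec<usize> {
    names.iter().map(|n| n.len()).collect()
}

// clippy::needless_return: the final expression is the return value.
fn with_marker(mut props: BTreeMap<String, String>) -> BTreeMap<String, String> {
    props.insert("marker".to_string(), "set".to_string());
    props
}

fn main() {
    let names = vec!["a".to_string(), "bb".to_string()];
    assert_eq!(name_lengths(&names), vec![1, 2]);

    // Copying an `Option<&str>`: `.as_ref().copied()` replaces the longer
    // `.map(|s| s.as_ref())` chain used before.
    let ns: Option<&str> = Some("org.apache");
    let default_ns: Option<&str> = Some("default");
    let resolved = ns.as_ref().copied().or(default_ns);
    assert_eq!(resolved, Some("org.apache"));

    assert_eq!(with_marker(BTreeMap::new()).len(), 1);
}
```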

datafusion/src/avro_to_arrow/reader.rs

Lines changed: 2 additions & 2 deletions
@@ -220,8 +220,8 @@ mod tests {
         assert_eq!(1, bool_col.0);
         assert_eq!(&DataType::Boolean, bool_col.1.data_type());
         let col = get_col::<BooleanArray>(&batch, bool_col).unwrap();
-        assert_eq!(true, col.value(0));
-        assert_eq!(false, col.value(1));
+        assert!(col.value(0));
+        assert!(!col.value(1));
         let tinyint_col = schema.column_with_name("tinyint_col").unwrap();
         assert_eq!(2, tinyint_col.0);
         assert_eq!(&DataType::Int32, tinyint_col.1.data_type());
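
This is `clippy::bool_assert_comparison`: comparing a boolean expression against `true` or `false` inside `assert_eq!` is noisier than asserting the value (or its negation) directly. A tiny illustrative sketch:

```rust
fn main() {
    let values = [true, false];
    // Instead of `assert_eq!(true, values[0])` and `assert_eq!(false, values[1])`:
    assert!(values[0]);
    assert!(!values[1]);
}
```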

datafusion/src/datasource/avro.rs

Lines changed: 2 additions & 2 deletions
@@ -53,7 +53,7 @@ impl AvroFile {
     /// Attempt to initialize a `AvroFile` from a path. The schema can be inferred automatically.
     pub fn try_new(path: &str, options: AvroReadOptions) -> Result<Self> {
         let schema = if let Some(schema) = options.schema {
-            schema.clone()
+            schema
         } else {
             let filenames =
                 common::build_checked_file_list(path, options.file_extension)?;
@@ -74,7 +74,7 @@
         options: AvroReadOptions,
     ) -> Result<Self> {
         let schema = match options.schema {
-            Some(s) => s.clone(),
+            Some(s) => s,
             None => {
                 return Err(DataFusionError::Execution(
                     "Schema must be provided to CsvRead".to_string(),

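Both hunks drop what looks like a redundant clone: the `if let` / `match` already moves the schema out of the `Option`, so cloning the freshly moved value only duplicates data that would be dropped immediately. A minimal sketch with a hypothetical `Schema` type (not the DataFusion one):

```rust
#[derive(Clone, Debug, PartialEq)]
struct Schema {
    fields: Vec<String>,
}

fn pick_schema(schema: Option<Schema>, fallback: Schema) -> Schema {
    match schema {
        // The pattern binding owns the value, so `s` can be returned as-is;
        // writing `s.clone()` here would be a redundant clone.
        Some(s) => s,
        None => fallback,
    }
}

fn main() {
    let provided = Some(Schema { fields: vec!["id".to_string()] });
    let fallback = Schema { fields: vec![] };
    assert_eq!(pick_schema(provided, fallback).fields, vec!["id".to_string()]);
}
```
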
datafusion/src/logical_plan/expr.rs

Lines changed: 6 additions & 10 deletions
@@ -1951,17 +1951,13 @@ mod tests {
     impl ExprRewriter for FooBarRewriter {
         fn mutate(&mut self, expr: Expr) -> Result<Expr> {
             match expr {
-                Expr::Literal(scalar) => {
-                    if let ScalarValue::Utf8(Some(utf8_val)) = scalar {
-                        let utf8_val = if utf8_val == "foo" {
-                            "bar".to_string()
-                        } else {
-                            utf8_val
-                        };
-                        Ok(lit(utf8_val))
+                Expr::Literal(ScalarValue::Utf8(Some(utf8_val))) => {
+                    let utf8_val = if utf8_val == "foo" {
+                        "bar".to_string()
                     } else {
-                        Ok(Expr::Literal(scalar))
-                    }
+                        utf8_val
+                    };
+                    Ok(lit(utf8_val))
                 }
                 // otherwise, return the expression unchanged
                 expr => Ok(expr),
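
The rewrite folds the inner `if let` into the match pattern itself (the shape clippy's `collapsible_match` lint points at), so the separate fallback branch disappears. A rough stand-in with hypothetical `Expr`/`Scalar` enums rather than the real DataFusion types:

```rust
#[derive(Debug)]
enum Scalar {
    Utf8(Option<String>),
}

#[derive(Debug)]
enum Expr {
    Literal(Scalar),
    Column(String),
}

// Matching the nested variant directly replaces an outer `Expr::Literal(scalar)`
// arm that then did `if let Scalar::Utf8(Some(s)) = scalar { ... }`.
fn rewrite(expr: Expr) -> Expr {
    match expr {
        Expr::Literal(Scalar::Utf8(Some(s))) => {
            let s = if s == "foo" { "bar".to_string() } else { s };
            Expr::Literal(Scalar::Utf8(Some(s)))
        }
        // otherwise, return the expression unchanged
        other => other,
    }
}

fn main() {
    let rewritten = rewrite(Expr::Literal(Scalar::Utf8(Some("foo".to_string()))));
    match rewritten {
        Expr::Literal(Scalar::Utf8(Some(s))) => assert_eq!(s, "bar"),
        other => panic!("unexpected expression: {:?}", other),
    }
    // Non-literal expressions pass through untouched.
    assert!(matches!(rewrite(Expr::Column("c".into())), Expr::Column(_)));
}
```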

datafusion/src/physical_plan/datetime_expressions.rs

Lines changed: 7 additions & 8 deletions
@@ -236,14 +236,8 @@ pub fn date_trunc(args: &[ColumnarValue]) -> Result<ColumnarValue> {
     let f = |x: Option<i64>| x.map(|x| date_trunc_single(granularity, x)).transpose();
 
     Ok(match array {
-        ColumnarValue::Scalar(scalar) => {
-            if let ScalarValue::TimestampNanosecond(v) = scalar {
-                ColumnarValue::Scalar(ScalarValue::TimestampNanosecond((f)(*v)?))
-            } else {
-                return Err(DataFusionError::Execution(
-                    "array of `date_trunc` must be non-null scalar Utf8".to_string(),
-                ));
-            }
+        ColumnarValue::Scalar(ScalarValue::TimestampNanosecond(v)) => {
+            ColumnarValue::Scalar(ScalarValue::TimestampNanosecond((f)(*v)?))
         }
         ColumnarValue::Array(array) => {
             let array = array
@@ -257,6 +251,11 @@ pub fn date_trunc(args: &[ColumnarValue]) -> Result<ColumnarValue> {
 
             ColumnarValue::Array(Arc::new(array))
         }
+        _ => {
+            return Err(DataFusionError::Execution(
+                "array of `date_trunc` must be non-null scalar Utf8".to_string(),
+            ));
+        }
     })
 }
 
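
Same family of change as in `expr.rs`: the scalar arm now matches `ScalarValue::TimestampNanosecond` directly, and the error path moves into a trailing `_` arm instead of an inner `else`. A simplified sketch with hypothetical types standing in for `ColumnarValue`/`ScalarValue`:

```rust
#[derive(Debug, PartialEq)]
enum Scalar {
    TimestampNanosecond(Option<i64>),
    Utf8(Option<String>),
}

// Truncate a nanosecond timestamp down to the whole second (a stand-in for the
// real granularity handling in `date_trunc`).
fn trunc_to_second(v: Scalar) -> Result<Scalar, String> {
    Ok(match v {
        Scalar::TimestampNanosecond(ts) => {
            Scalar::TimestampNanosecond(ts.map(|t| t - t % 1_000_000_000))
        }
        // Every other variant is rejected in one place instead of an inner `else`.
        _ => return Err("date_trunc expects a timestamp scalar".to_string()),
    })
}

fn main() {
    assert_eq!(
        trunc_to_second(Scalar::TimestampNanosecond(Some(1_500_000_001))),
        Ok(Scalar::TimestampNanosecond(Some(1_000_000_000)))
    );
    assert!(trunc_to_second(Scalar::Utf8(None)).is_err());
}
```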

datafusion/src/physical_plan/expressions/nth_value.rs

Lines changed: 1 addition & 0 deletions
@@ -78,6 +78,7 @@ impl NthValue {
     }
 
     /// Create a new NTH_VALUE window aggregate function
+    #[allow(clippy::self_named_constructors)]
     pub fn nth_value(
         name: impl Into<String>,
         expr: Arc<dyn PhysicalExpr>,
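
`clippy::self_named_constructors` fires when an associated function shares its type's name (here `NthValue::nth_value`); since the name deliberately mirrors the SQL `NTH_VALUE` function, the lint is allowed rather than renaming the constructor. A toy sketch of the same shape:

```rust
struct NthValue {
    n: u32,
}

impl NthValue {
    // Clippy would suggest `new` here; the allow keeps the domain-specific name.
    #[allow(clippy::self_named_constructors)]
    pub fn nth_value(n: u32) -> Self {
        Self { n }
    }
}

fn main() {
    let v = NthValue::nth_value(2);
    assert_eq!(v.n, 2);
}
```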

datafusion/src/physical_plan/string_expressions.rs

Lines changed: 1 addition & 0 deletions
@@ -290,6 +290,7 @@ pub fn concat(args: &[ColumnarValue]) -> Result<ColumnarValue> {
         .map(|index| {
             let mut owned_string: String = "".to_owned();
             for arg in args {
+                #[allow(clippy::collapsible_match)]
                 match arg {
                     ColumnarValue::Scalar(ScalarValue::Utf8(maybe_value)) => {
                         if let Some(value) = maybe_value {
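
Here the opposite call is made: `clippy::collapsible_match` would fold the inner `if let` into the match pattern, but the existing two-step shape is kept (presumably for readability across the other arms), so the lint is silenced at the `match` expression. A rough sketch of that shape with simplified stand-in types:

```rust
enum Scalar {
    Utf8(Option<String>),
}

enum ColumnarValue {
    Scalar(Scalar),
    Array(Vec<String>),
}

fn append(arg: &ColumnarValue, out: &mut String) {
    // The inner `if let` could be merged into the outer pattern, but the two-step
    // shape is kept on purpose; the attribute documents that choice to clippy.
    #[allow(clippy::collapsible_match)]
    match arg {
        ColumnarValue::Scalar(Scalar::Utf8(maybe_value)) => {
            if let Some(value) = maybe_value {
                out.push_str(value);
            }
        }
        ColumnarValue::Array(values) => {
            for v in values {
                out.push_str(v);
            }
        }
    }
}

fn main() {
    let mut s = String::new();
    append(&ColumnarValue::Scalar(Scalar::Utf8(Some("ab".to_string()))), &mut s);
    append(&ColumnarValue::Array(vec!["cd".to_string()]), &mut s);
    assert_eq!(s, "abcd");
}
```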
