
Commit 2ae3818

chore: enforce clippy lint needless_pass_by_value for datafusion-sql (#18554)
## Which issue does this PR close?

- Closes #18546.

## Rationale for this change

Enforce the clippy lint `needless_pass_by_value`.

## Are these changes tested?

Yes.

## Are there any user-facing changes?

No.
1 parent 8e3f157 commit 2ae3818
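For context, `clippy::needless_pass_by_value` fires when a function takes ownership of an argument it only reads, which forces callers to clone or give up their values. A minimal sketch of the kind of signature the lint flags and the usual fix; the names below are illustrative, not from the DataFusion codebase:

```rust
// Hypothetical example of what clippy::needless_pass_by_value flags.

// Before: takes ownership of `name` but only reads it, so every caller
// must clone or surrender its String.
// fn greet(name: String) -> String {
//     format!("hello, {name}")
// }

// After: borrow instead; callers pass `&some_string` or a literal.
fn greet(name: &str) -> String {
    format!("hello, {name}")
}

fn main() {
    let user = String::from("datafusion");
    // No clone needed: the caller keeps ownership of `user`.
    println!("{}", greet(&user));
    println!("{}", greet("clippy"));
}
```

The diffs below apply the same transformation to internal `datafusion-sql` functions.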

File tree

7 files changed, +49 -44 lines changed


datafusion/sql/src/cte.rs
Lines changed: 6 additions & 6 deletions

@@ -46,7 +46,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
         // Create a logical plan for the CTE
         let cte_plan = if is_recursive {
-            self.recursive_cte(cte_name.clone(), *cte.query, planner_context)?
+            self.recursive_cte(&cte_name, *cte.query, planner_context)?
         } else {
             self.non_recursive_cte(*cte.query, planner_context)?
         };

@@ -70,7 +70,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
     fn recursive_cte(
         &self,
-        cte_name: String,
+        cte_name: &str,
         mut cte_query: Query,
         planner_context: &mut PlannerContext,
     ) -> Result<LogicalPlan> {

@@ -136,7 +136,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
         // Step 2.1: Create a table source for the temporary relation
         let work_table_source = self
             .context_provider
-            .create_cte_work_table(&cte_name, Arc::clone(static_plan.schema().inner()))?;
+            .create_cte_work_table(cte_name, Arc::clone(static_plan.schema().inner()))?;
 
         // Step 2.2: Create a temporary relation logical plan that will be used
         // as the input to the recursive term

@@ -147,14 +147,14 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
             )?
             .build()?;
 
-        let name = cte_name.clone();
+        let name = cte_name.to_string();
 
         // Step 2.3: Register the temporary relation in the planning context
         // For all the self references in the variadic term, we'll replace it
         // with the temporary relation we created above by temporarily registering
         // it as a CTE. This temporary relation in the planning context will be
         // replaced by the actual CTE plan once we're done with the planning.
-        planner_context.insert_cte(cte_name.clone(), work_table_plan);
+        planner_context.insert_cte(cte_name.to_string(), work_table_plan);
 
         // ---------- Step 3: Compile the recursive term ------------------
         // this uses the named_relation we inserted above to resolve the

@@ -166,7 +166,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
         // if not, it is a non-recursive CTE
         if !has_work_table_reference(&recursive_plan, &work_table_source) {
            // Remove the work table plan from the context
-            planner_context.remove_cte(&cte_name);
+            planner_context.remove_cte(cte_name);
            // Compile it as a non-recursive CTE
            return self.set_operation_to_plan(
                SetOperator::Union,
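The pattern above recurs throughout the commit: a `String` parameter becomes `&str`, and `.to_string()` is called only at the points that genuinely need an owned value (here, the CTE registry). A rough sketch of that shape, with hypothetical names standing in for the planner context:

```rust
use std::collections::HashMap;

// Hypothetical stand-in for the planner context: the map needs owned keys.
struct Context {
    ctes: HashMap<String, String>,
}

// Borrow the name; allocate only where ownership is actually required.
fn register_cte(ctx: &mut Context, cte_name: &str, plan: &str) {
    // `.to_string()` happens here, at the ownership boundary, not at every caller.
    ctx.ctes.insert(cte_name.to_string(), plan.to_string());
}

fn main() {
    let mut ctx = Context { ctes: HashMap::new() };
    let name = String::from("numbers");
    register_cte(&mut ctx, &name, "SELECT 1");
    // `name` is still usable here because the function only borrowed it.
    println!("registered {} ({} total)", name, ctx.ctes.len());
}
```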

datafusion/sql/src/expr/binary_op.rs
Lines changed: 2 additions & 2 deletions

@@ -21,8 +21,8 @@ use datafusion_expr::Operator;
 use sqlparser::ast::BinaryOperator;
 
 impl<S: ContextProvider> SqlToRel<'_, S> {
-    pub(crate) fn parse_sql_binary_op(&self, op: BinaryOperator) -> Result<Operator> {
-        match op {
+    pub(crate) fn parse_sql_binary_op(&self, op: &BinaryOperator) -> Result<Operator> {
+        match *op {
             BinaryOperator::Gt => Ok(Operator::Gt),
             BinaryOperator::GtEq => Ok(Operator::GtEq),
             BinaryOperator::Lt => Ok(Operator::Lt),
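Taking the operator by reference means the body matches on `*op` rather than `op`; for an enum of unit variants nothing is moved by doing so. A small sketch with a hypothetical enum standing in for sqlparser's `BinaryOperator`:

```rust
// Hypothetical enum standing in for sqlparser's BinaryOperator.
enum BinOp {
    Gt,
    Lt,
    Eq,
}

// Borrow the operator; dereference in the match since the arms are unit
// variants and nothing is moved out of the reference.
fn describe(op: &BinOp) -> &'static str {
    match *op {
        BinOp::Gt => ">",
        BinOp::Lt => "<",
        BinOp::Eq => "=",
    }
}

fn main() {
    let op = BinOp::Gt;
    println!("{}", describe(&op));
}
```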

datafusion/sql/src/expr/mod.rs
Lines changed: 9 additions & 7 deletions

@@ -140,7 +140,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
         let RawBinaryExpr { op, left, right } = binary_expr;
         Ok(Expr::BinaryExpr(BinaryExpr::new(
             Box::new(left),
-            self.parse_sql_binary_op(op)?,
+            self.parse_sql_binary_op(&op)?,
             Box::new(right),
         )))
     }

@@ -270,7 +270,9 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
                 expr,
                 data_type,
                 format,
-            } => self.sql_cast_to_expr(*expr, data_type, format, schema, planner_context),
+            } => {
+                self.sql_cast_to_expr(*expr, &data_type, format, schema, planner_context)
+            }
 
             SQLExpr::Cast {
                 kind: CastKind::TryCast | CastKind::SafeCast,

@@ -553,7 +555,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
             }
 
             SQLExpr::Struct { values, fields } => {
-                self.parse_struct(schema, planner_context, values, fields)
+                self.parse_struct(schema, planner_context, values, &fields)
             }
             SQLExpr::Position { expr, r#in } => {
                 self.sql_position_to_expr(*expr, *r#in, schema, planner_context)

@@ -639,7 +641,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
         schema: &DFSchema,
         planner_context: &mut PlannerContext,
         values: Vec<SQLExpr>,
-        fields: Vec<StructField>,
+        fields: &[StructField],
     ) -> Result<Expr> {
         if !fields.is_empty() {
             return not_impl_err!("Struct fields are not supported yet");

@@ -673,7 +675,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
             Some(SQLExpr::Identifier(_))
             | Some(SQLExpr::Value(_))
             | Some(SQLExpr::CompoundIdentifier(_)) => {
-                self.parse_struct(schema, planner_context, values, vec![])
+                self.parse_struct(schema, planner_context, values, &[])
             }
             None => not_impl_err!("Empty tuple not supported yet"),
             _ => {

@@ -979,7 +981,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
     fn sql_cast_to_expr(
         &self,
         expr: SQLExpr,
-        data_type: SQLDataType,
+        data_type: &SQLDataType,
         format: Option<CastFormat>,
         schema: &DFSchema,
         planner_context: &mut PlannerContext,

@@ -988,7 +990,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
             return not_impl_err!("CAST with format is not supported: {format}");
         }
 
-        let dt = self.convert_data_type_to_field(&data_type)?;
+        let dt = self.convert_data_type_to_field(data_type)?;
         let expr = self.sql_expr_to_logical_expr(expr, schema, planner_context)?;
 
         // numeric constants are treated as seconds (rather as nanoseconds)
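Several signatures here also switch from `Vec<T>` to `&[T]`. A slice parameter accepts a borrowed `Vec`, an array, or an empty literal such as the `&[]` used above, so call sites no longer hand over ownership. A minimal sketch with a hypothetical field type:

```rust
// Hypothetical stand-in for sqlparser's StructField.
struct Field {
    name: String,
}

// Accept a slice: works for &Vec<Field>, &[Field; N], or &[].
fn check_fields(fields: &[Field]) -> Result<(), String> {
    if !fields.is_empty() {
        return Err(format!("fields not supported yet ({} given)", fields.len()));
    }
    Ok(())
}

fn main() {
    let fields = vec![Field { name: "a".to_string() }];
    // Both calls borrow; the Vec stays usable afterwards.
    println!("{:?}", check_fields(&fields).is_err());
    println!("{:?}", check_fields(&[]).is_ok());
    println!("still have {} field(s)", fields.len());
}
```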

datafusion/sql/src/expr/subquery.rs
Lines changed: 4 additions & 4 deletions

@@ -74,7 +74,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
         self.validate_single_column(
             &sub_plan,
-            spans.clone(),
+            &spans,
             "Too many columns! The subquery should only return one column",
             "Select only one column in the subquery",
         )?;

@@ -116,7 +116,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
         self.validate_single_column(
             &sub_plan,
-            spans.clone(),
+            &spans,
             "Too many columns! The subquery should only return one column",
             "Select only one column in the subquery",
         )?;

@@ -131,7 +131,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
     fn validate_single_column(
         &self,
         sub_plan: &LogicalPlan,
-        spans: Spans,
+        spans: &Spans,
         error_message: &str,
         help_message: &str,
     ) -> Result<()> {

@@ -148,7 +148,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
     fn build_multi_column_diagnostic(
         &self,
-        spans: Spans,
+        spans: &Spans,
         error_message: &str,
         help_message: &str,
     ) -> Diagnostic {
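Here the validator is invoked more than once, so borrowing `Spans` removes one clone per call site. The same shape in miniature, with a hypothetical span type:

```rust
// Hypothetical span list, standing in for datafusion_common's Spans.
// Clone is derived only to mirror the old by-value signature, which forced
// `spans.clone()` at every call site.
#[derive(Clone, Debug)]
struct Spans(Vec<(usize, usize)>);

// Borrowing makes the repeated calls free.
fn validate(column_count: usize, spans: &Spans, msg: &str) -> Result<(), String> {
    if column_count > 1 {
        return Err(format!("{msg} (at {:?})", spans.0));
    }
    Ok(())
}

fn main() {
    let spans = Spans(vec![(1, 4), (10, 12)]);
    // Two validations, zero clones.
    validate(1, &spans, "too many columns").unwrap();
    validate(1, &spans, "too many columns").unwrap();
    println!("validated against {:?}", spans);
}
```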

datafusion/sql/src/lib.rs
Lines changed: 3 additions & 0 deletions

@@ -23,6 +23,9 @@
 // Make sure fast / cheap clones on Arc are explicit:
 // https://github.com/apache/datafusion/issues/11143
 #![deny(clippy::clone_on_ref_ptr)]
+// https://github.com/apache/datafusion/issues/18503
+#![deny(clippy::needless_pass_by_value)]
+#![cfg_attr(test, allow(clippy::needless_pass_by_value))]
 
 //! This crate provides:
 //!
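With the crate-level deny in place, any remaining intentional pass-by-value has to be waived explicitly. A sketch of how the attributes compose with a per-item opt-out; the function below is hypothetical and not part of this commit:

```rust
// Crate-level, as in lib.rs above: deny the lint everywhere, but relax it
// for test code, which often passes owned values for convenience.
#![deny(clippy::needless_pass_by_value)]
#![cfg_attr(test, allow(clippy::needless_pass_by_value))]

// If a specific function really should take ownership (for example, a public
// API kept stable on purpose), the lint can be waived just for that item.
#[allow(clippy::needless_pass_by_value)]
pub fn keep_by_value(config: String) -> usize {
    config.len()
}

fn main() {
    println!("{}", keep_by_value(String::from("example")));
}
```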

datafusion/sql/src/parser.rs
Lines changed: 19 additions & 19 deletions

@@ -456,7 +456,7 @@ impl<'a> DFParser<'a> {
                 break;
             }
             if expecting_statement_delimiter {
-                return self.expected("end of statement", self.parser.peek_token());
+                return self.expected("end of statement", &self.parser.peek_token());
             }
 
             let statement = self.parse_statement()?;

@@ -470,7 +470,7 @@ impl<'a> DFParser<'a> {
     fn expected<T>(
         &self,
         expected: &str,
-        found: TokenWithSpan,
+        found: &TokenWithSpan,
     ) -> Result<T, DataFusionError> {
         let sql_parser_span = found.span;
         let span = Span::try_from_sqlparser_span(sql_parser_span);

@@ -488,11 +488,11 @@ impl<'a> DFParser<'a> {
     fn expect_token(
         &mut self,
         expected: &str,
-        token: Token,
+        token: &Token,
     ) -> Result<(), DataFusionError> {
         let next_token = self.parser.peek_token_ref();
-        if next_token.token != token {
-            self.expected(expected, next_token.clone())
+        if next_token.token != *token {
+            self.expected(expected, next_token)
         } else {
             Ok(())
         }

@@ -553,7 +553,7 @@ impl<'a> DFParser<'a> {
     /// contains any trailing, unparsed tokens.
     pub fn parse_into_expr(&mut self) -> Result<ExprWithAlias, DataFusionError> {
         let expr = self.parse_expr()?;
-        self.expect_token("end of expression", Token::EOF)?;
+        self.expect_token("end of expression", &Token::EOF)?;
         Ok(expr)
     }
 

@@ -638,7 +638,7 @@ impl<'a> DFParser<'a> {
                 if token == Token::EOF || token == Token::SemiColon {
                     break;
                 } else {
-                    return self.expected("end of statement or ;", token)?;
+                    return self.expected("end of statement or ;", &token)?;
                 }
             }
         }

@@ -675,15 +675,15 @@ impl<'a> DFParser<'a> {
                         // Unquoted namespaced keys have to conform to the syntax
                         // "<WORD>[\.<WORD>]*". If we have a key that breaks this
                         // pattern, error out:
-                        return self.expected("key name", next_token);
+                        return self.expected("key name", &next_token);
                     }
                 }
                 Ok(parts.join("."))
             }
             Token::SingleQuotedString(s) => Ok(s),
             Token::DoubleQuotedString(s) => Ok(s),
             Token::EscapedStringLiteral(s) => Ok(s),
-            _ => self.expected("key name", next_token),
+            _ => self.expected("key name", &next_token),
         }
     }
 

@@ -702,7 +702,7 @@ impl<'a> DFParser<'a> {
             Token::DoubleQuotedString(s) => Ok(Value::DoubleQuotedString(s)),
             Token::EscapedStringLiteral(s) => Ok(Value::EscapedStringLiteral(s)),
             Token::Number(n, l) => Ok(Value::Number(n, l)),
-            _ => self.expected("string or numeric value", next_token),
+            _ => self.expected("string or numeric value", &next_token),
         }
     }
 

@@ -732,7 +732,7 @@ impl<'a> DFParser<'a> {
             Token::Word(w) => Ok(w.value),
             Token::SingleQuotedString(w) => Ok(w),
             Token::DoubleQuotedString(w) => Ok(w),
-            _ => self.expected("an explain format such as TREE", next_token),
+            _ => self.expected("an explain format such as TREE", &next_token),
         }?;
         Ok(Some(format))
     }

@@ -777,7 +777,7 @@ impl<'a> DFParser<'a> {
                 let identifier = self.parser.parse_identifier()?;
                 partitions.push(identifier.to_string());
             } else {
-                return self.expected("partition name", self.parser.peek_token());
+                return self.expected("partition name", &self.parser.peek_token());
             }
             let comma = self.parser.consume_token(&Token::Comma);
             if self.parser.consume_token(&Token::RParen) {

@@ -786,7 +786,7 @@ impl<'a> DFParser<'a> {
             } else if !comma {
                 return self.expected(
                     "',' or ')' after partition definition",
-                    self.parser.peek_token(),
+                    &self.parser.peek_token(),
                 );
             }
         }

@@ -857,7 +857,7 @@ impl<'a> DFParser<'a> {
             } else {
                 return self.expected(
                     "column name or constraint definition",
-                    self.parser.peek_token(),
+                    &self.parser.peek_token(),
                 );
             }
             let comma = self.parser.consume_token(&Token::Comma);

@@ -867,7 +867,7 @@ impl<'a> DFParser<'a> {
             } else if !comma {
                 return self.expected(
                     "',' or ')' after column definition",
-                    self.parser.peek_token(),
+                    &self.parser.peek_token(),
                 );
             }
         }

@@ -887,7 +887,7 @@ impl<'a> DFParser<'a> {
                 } else {
                     return self.expected(
                         "constraint details after CONSTRAINT <name>",
-                        self.parser.peek_token(),
+                        &self.parser.peek_token(),
                     );
                 }
             } else if let Some(option) = self.parser.parse_optional_column_option()? {

@@ -1012,7 +1012,7 @@ impl<'a> DFParser<'a> {
                 if token == Token::EOF || token == Token::SemiColon {
                     break;
                 } else {
-                    return self.expected("end of statement or ;", token)?;
+                    return self.expected("end of statement or ;", &token)?;
                 }
             }
         }

@@ -1051,7 +1051,7 @@ impl<'a> DFParser<'a> {
         let token = self.parser.next_token();
         match &token.token {
             Token::Word(w) => parse_file_type(&w.value),
-            _ => self.expected("one of ARROW, PARQUET, NDJSON, or CSV", token),
+            _ => self.expected("one of ARROW, PARQUET, NDJSON, or CSV", &token),
         }
     }
 

@@ -1074,7 +1074,7 @@ impl<'a> DFParser<'a> {
             } else if !comma {
                 return self.expected(
                     "',' or ')' after option definition",
-                    self.parser.peek_token(),
+                    &self.parser.peek_token(),
                 );
             }
         }
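In the parser, the error helpers now borrow the offending token instead of consuming it, which lets `expect_token` forward the result of `peek_token_ref()` without the clone that was there before. A rough sketch of that shape with hypothetical token and parser types (not the real `DFParser` API):

```rust
// Hypothetical token type standing in for sqlparser's Token / TokenWithSpan.
#[derive(Debug, PartialEq)]
enum Tok {
    Word(String),
    Semicolon,
    Eof,
}

struct Parser {
    tokens: Vec<Tok>, // by convention the stream ends with Tok::Eof
    pos: usize,
}

impl Parser {
    // Peek without consuming; clamps to the trailing EOF token.
    fn peek(&self) -> &Tok {
        &self.tokens[self.pos.min(self.tokens.len() - 1)]
    }

    // Error helper: it only formats the token, so a borrow is enough.
    fn expected<T>(&self, what: &str, found: &Tok) -> Result<T, String> {
        Err(format!("expected {what}, found {found:?}"))
    }

    // The peeked reference is passed straight through, no clone.
    fn expect(&self, what: &str, token: &Tok) -> Result<(), String> {
        if self.peek() != token {
            self.expected(what, self.peek())
        } else {
            Ok(())
        }
    }
}

fn main() {
    let p = Parser {
        tokens: vec![Tok::Word("SELECT".into()), Tok::Eof],
        pos: 0,
    };
    println!("{:?}", p.expect("end of statement", &Tok::Semicolon));
}
```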

datafusion/sql/src/statement.rs
Lines changed: 6 additions & 6 deletions

@@ -1037,7 +1037,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
                 if limit.is_some() {
                     return not_impl_err!("Update-limit clause not supported")?;
                 }
-                self.update_to_plan(table, assignments, update_from, selection)
+                self.update_to_plan(table, &assignments, update_from, selection)
             }
 
             Statement::Delete(Delete {

@@ -1070,7 +1070,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
                 }
 
                 let table_name = self.get_delete_target(from)?;
-                self.delete_to_plan(table_name, selection)
+                self.delete_to_plan(&table_name, selection)
             }
 
             Statement::StartTransaction {

@@ -1100,7 +1100,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
                 if has_end_keyword {
                     return not_impl_err!("Transaction with END keyword not supported");
                 }
-                self.validate_transaction_kind(transaction)?;
+                self.validate_transaction_kind(transaction.as_ref())?;
                 let isolation_level: ast::TransactionIsolationLevel = modes
                     .iter()
                     .filter_map(|m: &TransactionMode| match m {

@@ -1903,7 +1903,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
 
     fn delete_to_plan(
         &self,
-        table_name: ObjectName,
+        table_name: &ObjectName,
         predicate_expr: Option<SQLExpr>,
     ) -> Result<LogicalPlan> {
         // Do a table lookup to verify the table exists

@@ -1947,7 +1947,7 @@ impl<S: ContextProvider> SqlToRel<'_, S> {
     fn update_to_plan(
         &self,
         table: TableWithJoins,
-        assignments: Vec<Assignment>,
+        assignments: &[Assignment],
         from: Option<TableWithJoins>,
         predicate_expr: Option<SQLExpr>,
     ) -> Result<LogicalPlan> {

@@ -2353,7 +2353,7 @@ ON p.function_name = r.routine_name
 
     fn validate_transaction_kind(
         &self,
-        kind: Option<BeginTransactionKind>,
+        kind: Option<&BeginTransactionKind>,
     ) -> Result<()> {
         match kind {
             // BEGIN
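The transaction check shows the `Option` variant of the same idea: the callee takes `Option<&T>` and the caller converts with `.as_ref()` instead of moving the value. A small sketch with a hypothetical enum standing in for `BeginTransactionKind`:

```rust
// Hypothetical stand-in for sqlparser's BeginTransactionKind.
#[derive(Debug)]
enum TxnKind {
    Transaction,
    Work,
}

// Borrow the optional kind; Option<&TxnKind> is cheap to pass around.
fn validate_kind(kind: Option<&TxnKind>) -> Result<(), String> {
    match kind {
        None | Some(TxnKind::Transaction) => Ok(()),
        Some(other) => Err(format!("unsupported transaction kind: {other:?}")),
    }
}

fn main() {
    let kind = Some(TxnKind::Work);
    // `.as_ref()` turns Option<TxnKind> into Option<&TxnKind> without a move.
    println!("{:?}", validate_kind(kind.as_ref()));
    // The original Option is still owned by the caller.
    println!("caller still has {kind:?}");
}
```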
