Skip to content

Commit b685e2d

Browse files
authored
chore: fix typos of expr, functions, optimizer, physical-expr-common, physical-expr, and physical-plan packages (#11538)
1 parent be130b4 commit b685e2d

File tree

54 files changed

+89
-89
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

54 files changed

+89
-89
lines changed

datafusion/expr/src/aggregate_function.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -152,8 +152,8 @@ mod tests {
152152
use strum::IntoEnumIterator;
153153

154154
#[test]
155-
// Test for AggregateFuncion's Display and from_str() implementations.
156-
// For each variant in AggregateFuncion, it converts the variant to a string
155+
// Test for AggregateFunction's Display and from_str() implementations.
156+
// For each variant in AggregateFunction, it converts the variant to a string
157157
// and then back to a variant. The test asserts that the original variant and
158158
// the reconstructed variant are the same. This assertion is also necessary for
159159
// function suggestion. See https://github.com/apache/datafusion/issues/8082

datafusion/expr/src/expr.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ use sqlparser::ast::NullTreatment;
109109
/// ## Binary Expressions
110110
///
111111
/// Exprs implement traits that allow easy to understand construction of more
112-
/// complex expresions. For example, to create `c1 + c2` to add columns "c1" and
112+
/// complex expressions. For example, to create `c1 + c2` to add columns "c1" and
113113
/// "c2" together
114114
///
115115
/// ```
@@ -1398,7 +1398,7 @@ impl Expr {
13981398
}
13991399
Ok(TreeNodeRecursion::Continue)
14001400
})
1401-
.expect("traversal is infallable");
1401+
.expect("traversal is infallible");
14021402
}
14031403

14041404
/// Return all references to columns and their occurrence counts in the expression.
@@ -1433,7 +1433,7 @@ impl Expr {
14331433
}
14341434
Ok(TreeNodeRecursion::Continue)
14351435
})
1436-
.expect("traversal is infallable");
1436+
.expect("traversal is infallible");
14371437
}
14381438

14391439
/// Returns true if there are any column references in this Expr

datafusion/expr/src/expr_rewriter/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ pub fn unnormalize_col(expr: Expr) -> Expr {
155155
})
156156
})
157157
.data()
158-
.expect("Unnormalize is infallable")
158+
.expect("Unnormalize is infallible")
159159
}
160160

161161
/// Create a Column from the Scalar Expr
@@ -201,7 +201,7 @@ pub fn strip_outer_reference(expr: Expr) -> Expr {
201201
})
202202
})
203203
.data()
204-
.expect("strip_outer_reference is infallable")
204+
.expect("strip_outer_reference is infallible")
205205
}
206206

207207
/// Returns plan with expressions coerced to types compatible with

datafusion/expr/src/logical_plan/builder.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -412,14 +412,14 @@ impl LogicalPlanBuilder {
412412

413413
/// Add missing sort columns to all downstream projection
414414
///
415-
/// Thus, if you have a LogialPlan that selects A and B and have
415+
/// Thus, if you have a LogicalPlan that selects A and B and have
416416
/// not requested a sort by C, this code will add C recursively to
417417
/// all input projections.
418418
///
419419
/// Adding a new column is not correct if there is a `Distinct`
420420
/// node, which produces only distinct values of its
421421
/// inputs. Adding a new column to its input will result in
422-
/// potententially different results than with the original column.
422+
/// potentially different results than with the original column.
423423
///
424424
/// For example, if the input is like:
425425
///
@@ -1763,7 +1763,7 @@ mod tests {
17631763
.unwrap();
17641764
assert_eq!(&expected, plan.schema().as_ref());
17651765

1766-
// Note scan of "EMPLOYEE_CSV" is treated as a SQL identifer
1766+
// Note scan of "EMPLOYEE_CSV" is treated as a SQL identifier
17671767
// (and thus normalized to "employee_csv") as well
17681768
let projection = None;
17691769
let plan =

datafusion/expr/src/logical_plan/display.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -338,9 +338,9 @@ impl<'a, 'b> PgJsonVisitor<'a, 'b> {
338338
.collect::<Vec<_>>()
339339
.join(", ");
340340

341-
let elipse = if values.len() > 5 { "..." } else { "" };
341+
let ellipsis = if values.len() > 5 { "..." } else { "" };
342342

343-
let values_str = format!("{}{}", str_values, elipse);
343+
let values_str = format!("{}{}", str_values, ellipsis);
344344
json!({
345345
"Node Type": "Values",
346346
"Values": values_str

datafusion/expr/src/logical_plan/plan.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -263,7 +263,7 @@ pub enum LogicalPlan {
263263
/// Prepare a statement and find any bind parameters
264264
/// (e.g. `?`). This is used to implement SQL-prepared statements.
265265
Prepare(Prepare),
266-
/// Data Manipulaton Language (DML): Insert / Update / Delete
266+
/// Data Manipulation Language (DML): Insert / Update / Delete
267267
Dml(DmlStatement),
268268
/// Data Definition Language (DDL): CREATE / DROP TABLES / VIEWS / SCHEMAS
269269
Ddl(DdlStatement),
@@ -1598,8 +1598,8 @@ impl LogicalPlan {
15981598
})
15991599
.collect();
16001600

1601-
let elipse = if values.len() > 5 { "..." } else { "" };
1602-
write!(f, "Values: {}{}", str_values.join(", "), elipse)
1601+
let ellipsis = if values.len() > 5 { "..." } else { "" };
1602+
write!(f, "Values: {}{}", str_values.join(", "), ellipsis)
16031603
}
16041604

16051605
LogicalPlan::TableScan(TableScan {

datafusion/expr/src/partition_evaluator.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ pub trait PartitionEvaluator: Debug + Send {
135135
/// must produce an output column with one output row for every
136136
/// input row.
137137
///
138-
/// `num_rows` is requied to correctly compute the output in case
138+
/// `num_rows` is required to correctly compute the output in case
139139
/// `values.len() == 0`
140140
///
141141
/// Implementing this function is an optimization: certain window

datafusion/expr/src/signature.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ pub enum Volatility {
6565
/// automatically coerces (add casts to) function arguments so they match the type signature.
6666
///
6767
/// For example, a function like `cos` may only be implemented for `Float64` arguments. To support a query
68-
/// that calles `cos` with a different argument type, such as `cos(int_column)`, type coercion automatically
68+
/// that calls `cos` with a different argument type, such as `cos(int_column)`, type coercion automatically
6969
/// adds a cast such as `cos(CAST int_column AS DOUBLE)` during planning.
7070
///
7171
/// # Data Types

datafusion/expr/src/type_coercion/binary.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -370,7 +370,7 @@ impl From<&DataType> for TypeCategory {
370370
/// The rules in the document provide a clue, but adhering strictly to them doesn't precisely
371371
/// align with the behavior of Postgres. Therefore, we've made slight adjustments to the rules
372372
/// to better match the behavior of both Postgres and DuckDB. For example, we expect adjusted
373-
/// decimal percision and scale when coercing decimal types.
373+
/// decimal precision and scale when coercing decimal types.
374374
pub fn type_union_resolution(data_types: &[DataType]) -> Option<DataType> {
375375
if data_types.is_empty() {
376376
return None;
@@ -718,7 +718,7 @@ pub fn get_wider_type(lhs: &DataType, rhs: &DataType) -> Result<DataType> {
718718
(Int16 | Int32 | Int64, Int8) | (Int32 | Int64, Int16) | (Int64, Int32) |
719719
// Left Float is larger than right Float.
720720
(Float32 | Float64, Float16) | (Float64, Float32) |
721-
// Left String is larget than right String.
721+
// Left String is larger than right String.
722722
(LargeUtf8, Utf8) |
723723
// Any left type is wider than a right hand side Null.
724724
(_, Null) => lhs.clone(),

datafusion/expr/src/type_coercion/functions.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -646,7 +646,7 @@ mod tests {
646646
vec![DataType::UInt8, DataType::UInt16],
647647
Some(vec![DataType::UInt8, DataType::UInt16]),
648648
),
649-
// 2 entries, can coerse values
649+
// 2 entries, can coerce values
650650
(
651651
vec![DataType::UInt16, DataType::UInt16],
652652
vec![DataType::UInt8, DataType::UInt16],

datafusion/expr/src/type_coercion/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
//!
2020
//! Coercion is performed automatically by DataFusion when the types
2121
//! of arguments passed to a function or needed by operators do not
22-
//! exacty match the types required by that function / operator. In
22+
//! exactly match the types required by that function / operator. In
2323
//! this case, DataFusion will attempt to *coerce* the arguments to
2424
//! types accepted by the function by inserting CAST operations.
2525
//!

datafusion/expr/src/utils.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1212,7 +1212,7 @@ pub fn merge_schema(inputs: Vec<&LogicalPlan>) -> DFSchema {
12121212
}
12131213
}
12141214

1215-
/// Build state name. State is the intermidiate state of the aggregate function.
1215+
/// Build state name. State is the intermediate state of the aggregate function.
12161216
pub fn format_state_name(name: &str, state_name: &str) -> String {
12171217
format!("{name}[{state_name}]")
12181218
}

datafusion/functions-aggregate/src/approx_percentile_cont_with_weight.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ make_udaf_expr_and_func!(
4343
approx_percentile_cont_with_weight_udaf
4444
);
4545

46-
/// APPROX_PERCENTILE_CONT_WITH_WEIGTH aggregate expression
46+
/// APPROX_PERCENTILE_CONT_WITH_WEIGHT aggregate expression
4747
pub struct ApproxPercentileContWithWeight {
4848
signature: Signature,
4949
approx_percentile_cont: ApproxPercentileCont,

datafusion/functions-array/src/remove.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -228,7 +228,7 @@ fn array_remove_internal(
228228
}
229229
}
230230

231-
/// For each element of `list_array[i]`, removed up to `arr_n[i]` occurences
231+
/// For each element of `list_array[i]`, removes up to `arr_n[i]` occurrences
232232
/// of `element_array[i]`.
233233
///
234234
/// The type of each **element** in `list_array` must be the same as the type of

datafusion/functions/src/core/arrow_cast.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -444,7 +444,7 @@ fn is_separator(c: char) -> bool {
444444
}
445445

446446
#[derive(Debug)]
447-
/// Splits a strings like Dictionary(Int32, Int64) into tokens sutable for parsing
447+
/// Splits a string like Dictionary(Int32, Int64) into tokens suitable for parsing
448448
///
449449
/// For example the string "Timestamp(Nanosecond, None)" would be parsed into:
450450
///

datafusion/functions/src/datetime/to_local_time.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ impl ToLocalTimeFunc {
8484
let arg_type = time_value.data_type();
8585
match arg_type {
8686
DataType::Timestamp(_, None) => {
87-
// if no timezone specificed, just return the input
87+
// if no timezone specified, just return the input
8888
Ok(time_value.clone())
8989
}
9090
// If has timezone, adjust the underlying time value. The current time value
@@ -165,7 +165,7 @@ impl ToLocalTimeFunc {
165165

166166
match array.data_type() {
167167
Timestamp(_, None) => {
168-
// if no timezone specificed, just return the input
168+
// if no timezone specified, just return the input
169169
Ok(time_value.clone())
170170
}
171171
Timestamp(Nanosecond, Some(_)) => {

datafusion/functions/src/regex/regexpreplace.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -562,7 +562,7 @@ mod tests {
562562
#[test]
563563
fn test_static_pattern_regexp_replace_pattern_error() {
564564
let values = StringArray::from(vec!["abc"; 5]);
565-
// Delibaretely using an invalid pattern to see how the single pattern
565+
// Deliberately using an invalid pattern to see how the single pattern
566566
// error is propagated on regexp_replace.
567567
let patterns = StringArray::from(vec!["["; 5]);
568568
let replacements = StringArray::from(vec!["foo"; 5]);

datafusion/functions/src/unicode/substrindex.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -122,15 +122,15 @@ pub fn substr_index<T: OffsetSizeTrait>(args: &[ArrayRef]) -> Result<ArrayRef> {
122122

123123
let occurrences = usize::try_from(n.unsigned_abs()).unwrap_or(usize::MAX);
124124
let length = if n > 0 {
125-
let splitted = string.split(delimiter);
126-
splitted
125+
let split = string.split(delimiter);
126+
split
127127
.take(occurrences)
128128
.map(|s| s.len() + delimiter.len())
129129
.sum::<usize>()
130130
- delimiter.len()
131131
} else {
132-
let splitted = string.rsplit(delimiter);
133-
splitted
132+
let split = string.rsplit(delimiter);
133+
split
134134
.take(occurrences)
135135
.map(|s| s.len() + delimiter.len())
136136
.sum::<usize>()

datafusion/optimizer/src/analyzer/subquery.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -159,11 +159,11 @@ fn check_inner_plan(
159159
let (correlated, _): (Vec<_>, Vec<_>) = split_conjunction(predicate)
160160
.into_iter()
161161
.partition(|e| e.contains_outer());
162-
let maybe_unsupport = correlated
162+
let maybe_unsupported = correlated
163163
.into_iter()
164164
.filter(|expr| !can_pullup_over_aggregation(expr))
165165
.collect::<Vec<_>>();
166-
if is_aggregate && is_scalar && !maybe_unsupport.is_empty() {
166+
if is_aggregate && is_scalar && !maybe_unsupported.is_empty() {
167167
return plan_err!(
168168
"Correlated column is not allowed in predicate: {predicate}"
169169
);

datafusion/optimizer/src/common_subexpr_eliminate.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -248,7 +248,7 @@ impl CommonSubexprEliminate {
248248
}
249249

250250
/// Rewrites the expression in `exprs_list` with common sub-expressions
251-
/// replaced with a new colum and adds a ProjectionExec on top of `input`
251+
/// replaced with a new column and adds a ProjectionExec on top of `input`
252252
/// which computes any replaced common sub-expressions.
253253
///
254254
/// Returns a tuple of:
@@ -636,7 +636,7 @@ impl CommonSubexprEliminate {
636636
/// Returns the window expressions, and the input to the deepest child
637637
/// LogicalPlan.
638638
///
639-
/// For example, if the input widnow looks like
639+
/// For example, if the input window looks like
640640
///
641641
/// ```text
642642
/// LogicalPlan::Window(exprs=[a, b, c])

datafusion/optimizer/src/decorrelate_predicate_subquery.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1232,7 +1232,7 @@ mod tests {
12321232
}
12331233

12341234
#[test]
1235-
fn in_subquery_muti_project_subquery_cols() -> Result<()> {
1235+
fn in_subquery_multi_project_subquery_cols() -> Result<()> {
12361236
let table_scan = test_table_scan()?;
12371237
let subquery_scan = test_table_scan_with_name("sq")?;
12381238

datafusion/optimizer/src/optimize_projections/required_indices.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ impl RequiredIndicies {
160160
(l, r.map_indices(|idx| idx - n))
161161
}
162162

163-
/// Partitions the indicies in this instance into two groups based on the
163+
/// Partitions the indices in this instance into two groups based on the
164164
/// given predicate function `f`.
165165
fn partition<F>(&self, f: F) -> (Self, Self)
166166
where

datafusion/optimizer/src/push_down_filter.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1913,7 +1913,7 @@ mod tests {
19131913
assert_optimized_plan_eq(plan, expected)
19141914
}
19151915

1916-
/// post-join predicates with columns from both sides are converted to join filterss
1916+
/// post-join predicates with columns from both sides are converted to join filters
19171917
#[test]
19181918
fn filter_join_on_common_dependent() -> Result<()> {
19191919
let table_scan = test_table_scan()?;

datafusion/optimizer/src/simplify_expressions/expr_simplifier.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -478,7 +478,7 @@ struct ConstEvaluator<'a> {
478478
#[allow(dead_code)]
479479
/// The simplify result of ConstEvaluator
480480
enum ConstSimplifyResult {
481-
// Expr was simplifed and contains the new expression
481+
// Expr was simplified and contains the new expression
482482
Simplified(ScalarValue),
483483
// Expr was not simplified and original value is returned
484484
NotSimplified(ScalarValue),
@@ -519,7 +519,7 @@ impl<'a> TreeNodeRewriter for ConstEvaluator<'a> {
519519
fn f_up(&mut self, expr: Expr) -> Result<Transformed<Expr>> {
520520
match self.can_evaluate.pop() {
521521
// Certain expressions such as `CASE` and `COALESCE` are short circuiting
522-
// and may not evalute all their sub expressions. Thus if
522+
// and may not evaluate all their sub expressions. Thus if
523523
// any error is encountered during simplification, return the original
524524
// so that normal evaluation can occur
525525
Some(true) => {

datafusion/optimizer/src/unwrap_cast_in_comparison.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -893,7 +893,7 @@ mod tests {
893893
DataType::Timestamp(TimeUnit::Nanosecond, utc)
894894
}
895895

896-
// a dictonary type for storing string tags
896+
// a dictionary type for storing string tags
897897
fn dictionary_tag_type() -> DataType {
898898
DataType::Dictionary(Box::new(DataType::Int32), Box::new(DataType::Utf8))
899899
}

datafusion/physical-expr-common/src/aggregate/groups_accumulator/accumulate.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -410,7 +410,7 @@ pub fn accumulate_indices<F>(
410410
},
411411
);
412412

413-
// handle any remaining bits (after the intial 64)
413+
// handle any remaining bits (after the initial 64)
414414
let remainder_bits = bit_chunks.remainder_bits();
415415
group_indices_remainder
416416
.iter()
@@ -835,7 +835,7 @@ mod test {
835835
}
836836
}
837837

838-
/// Parallel implementaiton of NullState to check expected values
838+
/// Parallel implementation of NullState to check expected values
839839
#[derive(Debug, Default)]
840840
struct MockNullState {
841841
/// group indices that had values that passed the filter

datafusion/physical-expr-common/src/aggregate/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -346,7 +346,7 @@ impl AggregateExpr for AggregateFunctionExpr {
346346
let accumulator = self.fun.create_sliding_accumulator(args)?;
347347

348348
// Accumulators that have window frame startings different
349-
// than `UNBOUNDED PRECEDING`, such as `1 PRECEEDING`, need to
349+
// than `UNBOUNDED PRECEDING`, such as `1 PRECEDING`, need to
350350
// implement retract_batch method in order to run correctly
351351
// currently in DataFusion.
352352
//
@@ -377,7 +377,7 @@ impl AggregateExpr for AggregateFunctionExpr {
377377
// 3. Third sum we add to the state sum value between `[2, 3)`
378378
// (`[0, 2)` is already in the state sum). Also we need to
379379
// retract values between `[0, 1)` by this way we can obtain sum
380-
// between [1, 3) which is indeed the apropriate range.
380+
// between [1, 3) which is indeed the appropriate range.
381381
//
382382
// When we use `UNBOUNDED PRECEDING` in the query starting
383383
// index will always be 0 for the desired range, and hence the

datafusion/physical-expr-common/src/binary_map.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -355,7 +355,7 @@ where
355355
assert_eq!(values.len(), batch_hashes.len());
356356

357357
for (value, &hash) in values.iter().zip(batch_hashes.iter()) {
358-
// hande null value
358+
// handle null value
359359
let Some(value) = value else {
360360
let payload = if let Some(&(payload, _offset)) = self.null.as_ref() {
361361
payload
@@ -439,7 +439,7 @@ where
439439
// Put the small values into buffer and offsets so it
440440
// appears the output array, and store that offset
441441
// so the bytes can be compared if needed
442-
let offset = self.buffer.len(); // offset of start fof data
442+
let offset = self.buffer.len(); // offset of start of data
443443
self.buffer.append_slice(value);
444444
self.offsets.push(O::usize_as(self.buffer.len()));
445445

datafusion/physical-expr-common/src/expressions/column.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ impl PhysicalExpr for Column {
8080
Ok(input_schema.field(self.index).data_type().clone())
8181
}
8282

83-
/// Decide whehter this expression is nullable, given the schema of the input
83+
/// Decide whether this expression is nullable, given the schema of the input
8484
fn nullable(&self, input_schema: &Schema) -> Result<bool> {
8585
self.bounds_check(input_schema)?;
8686
Ok(input_schema.field(self.index).is_nullable())

0 commit comments

Comments
 (0)