Retract IndexSet, IndexMap type alias #13655

Merged

2 commits merged on Dec 5, 2024

Changes from 1 commit
Retract IndexSet, IndexMap type alias
akurmustafa committed Dec 5, 2024
commit c702a17c0b1f26bae0e0d3e054b800cbe30f0231
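This commit removes the IndexMap/IndexSet type aliases that datafusion_common re-exported and switches every call site to importing the types from the indexmap crate directly, adding indexmap as an explicit dependency where needed. The hunks below are mechanical import changes; as a rough illustration of the resulting pattern (the function and variable names here are made up for the example, not taken from the diff):

    // Before this change, call sites used the aliases re-exported by datafusion_common:
    //     use datafusion_common::{IndexMap, IndexSet};
    // After it, the upstream types are imported directly.
    use indexmap::{IndexMap, IndexSet};

    fn collect_columns(names: &[&str]) -> (IndexMap<String, usize>, IndexSet<String>) {
        // IndexMap/IndexSet behave like HashMap/HashSet but also remember
        // insertion order, so iterating over them is deterministic.
        let mut positions = IndexMap::new();
        let mut unique = IndexSet::new();
        for (i, name) in names.iter().enumerate() {
            positions.entry(name.to_string()).or_insert(i);
            unique.insert(name.to_string());
        }
        (positions, unique)
    }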
2 changes: 1 addition & 1 deletion datafusion/common/src/cse.rs
@@ -24,8 +24,8 @@ use crate::tree_node::{
    Transformed, TransformedResult, TreeNode, TreeNodeRecursion, TreeNodeRewriter,
    TreeNodeVisitor,
};
-use crate::IndexMap;
use crate::Result;
+use indexmap::IndexMap;
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, Hasher, RandomState};
use std::marker::PhantomData;
4 changes: 0 additions & 4 deletions datafusion/common/src/lib.rs
@@ -73,7 +73,6 @@ pub use param_value::ParamValues;
pub use scalar::{ScalarType, ScalarValue};
pub use schema_reference::SchemaReference;
pub use stats::{ColumnStatistics, Statistics};
-use std::hash::RandomState;
pub use table_reference::{ResolvedTableReference, TableReference};
pub use unnest::{RecursionUnnestOption, UnnestOptions};
pub use utils::project_schema;
@@ -94,9 +93,6 @@ pub use error::{
pub type HashMap<K, V, S = DefaultHashBuilder> = hashbrown::HashMap<K, V, S>;
pub type HashSet<T, S = DefaultHashBuilder> = hashbrown::HashSet<T, S>;

-pub type IndexMap<T, S = DefaultHashBuilder> = indexmap::IndexMap<T, S>;
-pub type IndexSet<T, S = RandomState> = indexmap::IndexSet<T, S>;
-
/// Downcast an Arrow Array to a concrete type, return an `DataFusionError::Internal` if the cast is
/// not possible. In normal usage of DataFusion the downcast should always succeed.
///
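For context, the two retracted aliases above each declared only two generic parameters and used inconsistent hasher defaults (hashbrown's DefaultHashBuilder for IndexMap, std's RandomState for IndexSet), whereas indexmap's own map type is generic over key, value, and hasher, with RandomState as its documented default. A small sketch of direct use of the upstream types; this is an illustration, not an excerpt from DataFusion:

    // indexmap's generics (per the indexmap crate's documentation):
    //     pub struct IndexMap<K, V, S = RandomState> { .. }
    //     pub struct IndexSet<T, S = RandomState> { .. }
    use indexmap::{IndexMap, IndexSet};

    fn main() {
        // With the aliases gone, key and value parameters are spelled out at
        // the call site and indexmap's default hasher is used.
        let mut fields: IndexMap<&str, &str> = IndexMap::new();
        fields.insert("id", "Int64");
        fields.insert("name", "Utf8");

        let mut required: IndexSet<&str> = IndexSet::new();
        required.insert("id");

        // Entries come back in insertion order.
        assert_eq!(fields.get_index(0), Some((&"id", &"Int64")));
        assert!(required.contains("id"));
    }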
1 change: 1 addition & 0 deletions datafusion/expr/Cargo.toml
@@ -49,6 +49,7 @@ datafusion-expr-common = { workspace = true }
datafusion-functions-aggregate-common = { workspace = true }
datafusion-functions-window-common = { workspace = true }
datafusion-physical-expr-common = { workspace = true }
+indexmap = { workspace = true }
paste = "^1.0"
recursive = { workspace = true }
serde_json = { workspace = true }
4 changes: 3 additions & 1 deletion datafusion/expr/src/logical_plan/builder.rs
@@ -54,11 +54,13 @@ use datafusion_common::file_options::file_type::FileType;
use datafusion_common::{
    exec_err, get_target_functional_dependencies, internal_err, not_impl_err,
    plan_datafusion_err, plan_err, Column, DFSchema, DFSchemaRef, DataFusionError,
-    FunctionalDependencies, IndexSet, Result, ScalarValue, TableReference, ToDFSchema,
+    FunctionalDependencies, Result, ScalarValue, TableReference, ToDFSchema,
    UnnestOptions,
};
use datafusion_expr_common::type_coercion::binary::type_union_resolution;

+use indexmap::IndexSet;
+
/// Default table name for unnamed table
pub const UNNAMED_TABLE: &str = "?table?";
3 changes: 2 additions & 1 deletion datafusion/expr/src/logical_plan/plan.rs
@@ -51,9 +51,10 @@ use datafusion_common::tree_node::{
use datafusion_common::{
    aggregate_functional_dependencies, internal_err, plan_err, Column, Constraints,
    DFSchema, DFSchemaRef, DataFusionError, Dependency, FunctionalDependence,
-    FunctionalDependencies, IndexSet, ParamValues, Result, ScalarValue, TableReference,
+    FunctionalDependencies, ParamValues, Result, ScalarValue, TableReference,
    UnnestOptions,
};
+use indexmap::IndexSet;

// backwards compatibility
use crate::display::PgJsonVisitor;
3 changes: 2 additions & 1 deletion datafusion/expr/src/utils.rs
@@ -36,9 +36,10 @@ use datafusion_common::tree_node::{
use datafusion_common::utils::get_at_indices;
use datafusion_common::{
    internal_err, plan_datafusion_err, plan_err, Column, DFSchema, DFSchemaRef,
-    DataFusionError, HashMap, IndexSet, Result, TableReference,
+    DataFusionError, HashMap, Result, TableReference,
};

+use indexmap::IndexSet;
use sqlparser::ast::{ExceptSelectItem, ExcludeSelectItem};

pub use datafusion_functions_aggregate_common::order::AggregateOrderSensitivity;
5 changes: 4 additions & 1 deletion datafusion/optimizer/src/eliminate_duplicated_expr.rs
@@ -20,10 +20,13 @@
use crate::optimizer::ApplyOrder;
use crate::{OptimizerConfig, OptimizerRule};
use datafusion_common::tree_node::Transformed;
-use datafusion_common::{IndexSet, Result};
+use datafusion_common::Result;
use datafusion_expr::logical_plan::LogicalPlan;
use datafusion_expr::{Aggregate, Expr, Sort, SortExpr};
use std::hash::{Hash, Hasher};
+
+use indexmap::IndexSet;
+
/// Optimization rule that eliminate duplicated expr.
#[derive(Default, Debug)]
pub struct EliminateDuplicatedExpr;
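The import change above is typical of the order-preserving deduplication this rule performs; a generic sketch of that pattern with IndexSet, not the rule's actual implementation:

    use indexmap::IndexSet;

    // Drop duplicates while keeping the first occurrence and the original
    // left-to-right order, which a plain HashSet would not guarantee.
    fn dedup_preserving_order(exprs: Vec<String>) -> Vec<String> {
        let set: IndexSet<String> = exprs.into_iter().collect();
        set.into_iter().collect()
    }

    fn main() {
        let deduped = dedup_preserving_order(vec![
            "a ASC".to_string(),
            "b DESC".to_string(),
            "a ASC".to_string(),
        ]);
        assert_eq!(deduped, vec!["a ASC".to_string(), "b DESC".to_string()]);
    }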
3 changes: 2 additions & 1 deletion datafusion/optimizer/src/push_down_filter.rs
@@ -20,13 +20,14 @@
use std::collections::{HashMap, HashSet};
use std::sync::Arc;

+use indexmap::IndexSet;
use itertools::Itertools;

use datafusion_common::tree_node::{
    Transformed, TransformedResult, TreeNode, TreeNodeRecursion,
};
use datafusion_common::{
-    internal_err, plan_err, qualified_name, Column, DFSchema, IndexSet, Result,
+    internal_err, plan_err, qualified_name, Column, DFSchema, Result,
};
use datafusion_expr::expr_rewriter::replace_col;
use datafusion_expr::logical_plan::{Join, JoinType, LogicalPlan, TableScan, Union};
@@ -31,9 +31,7 @@ use datafusion_common::{
    cast::{as_large_list_array, as_list_array},
    tree_node::{Transformed, TransformedResult, TreeNode, TreeNodeRewriter},
};
-use datafusion_common::{
-    internal_err, DFSchema, DataFusionError, IndexSet, Result, ScalarValue,
-};
+use datafusion_common::{internal_err, DFSchema, DataFusionError, Result, ScalarValue};
use datafusion_expr::simplify::ExprSimplifyResult;
use datafusion_expr::{
    and, lit, or, BinaryExpr, Case, ColumnarValue, Expr, Like, Operator, Volatility,
@@ -50,6 +48,7 @@ use crate::analyzer::type_coercion::TypeCoercionRewriter;
use crate::simplify_expressions::guarantees::GuaranteeRewriter;
use crate::simplify_expressions::regex::simplify_regex_expr;
use crate::simplify_expressions::SimplifyInfo;
+use indexmap::IndexSet;
use regex::Regex;

use super::inlist_simplifier::ShortenInListSimplifier;
1 change: 1 addition & 0 deletions datafusion/physical-expr/Cargo.toml
@@ -48,6 +48,7 @@ datafusion-functions-aggregate-common = { workspace = true }
datafusion-physical-expr-common = { workspace = true }
half = { workspace = true }
hashbrown = { workspace = true }
+indexmap = { workspace = true }
itertools = { workspace = true, features = ["use_std"] }
log = { workspace = true }
paste = "^1.0"
5 changes: 2 additions & 3 deletions datafusion/physical-expr/src/equivalence/properties.rs
@@ -36,13 +36,12 @@ use crate::{

use arrow_schema::{SchemaRef, SortOptions};
use datafusion_common::tree_node::{Transformed, TransformedResult, TreeNode};
-use datafusion_common::{
-    internal_err, plan_err, IndexMap, IndexSet, JoinSide, JoinType, Result,
-};
+use datafusion_common::{internal_err, plan_err, JoinSide, JoinType, Result};
use datafusion_expr::interval_arithmetic::Interval;
use datafusion_expr::sort_properties::{ExprProperties, SortProperties};
use datafusion_physical_expr_common::utils::ExprPropertiesNode;

+use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;

/// A `EquivalenceProperties` object stores information known about the output
4 changes: 3 additions & 1 deletion datafusion/physical-expr/src/window/window_expr.rs
@@ -28,12 +28,14 @@ use arrow::compute::SortOptions;
use arrow::datatypes::Field;
use arrow::record_batch::RecordBatch;
use datafusion_common::utils::compare_rows;
-use datafusion_common::{internal_err, DataFusionError, IndexMap, Result, ScalarValue};
+use datafusion_common::{internal_err, DataFusionError, Result, ScalarValue};
use datafusion_expr::window_state::{
    PartitionBatchState, WindowAggState, WindowFrameContext, WindowFrameStateGroups,
};
use datafusion_expr::{Accumulator, PartitionEvaluator, WindowFrame, WindowFrameBound};

+use indexmap::IndexMap;
+
/// Common trait for [window function] implementations
///
/// # Aggregate Window Expressions
1 change: 1 addition & 0 deletions datafusion/physical-plan/Cargo.toml
@@ -57,6 +57,7 @@ datafusion-physical-expr-common = { workspace = true }
futures = { workspace = true }
half = { workspace = true }
hashbrown = { workspace = true }
+indexmap = { workspace = true }
itertools = { workspace = true, features = ["use_std"] }
log = { workspace = true }
once_cell = "1.18.0"
@@ -52,7 +52,7 @@ use datafusion_common::utils::{
    evaluate_partition_ranges, get_at_indices, get_row_at_idx,
};
use datafusion_common::{
-    arrow_datafusion_err, exec_err, DataFusionError, HashMap, IndexMap, Result,
+    arrow_datafusion_err, exec_err, DataFusionError, HashMap, Result,
};
use datafusion_execution::TaskContext;
use datafusion_expr::window_state::{PartitionBatchState, WindowAggState};
@@ -62,9 +62,11 @@ use datafusion_physical_expr::window::{
};
use datafusion_physical_expr::PhysicalExpr;
use datafusion_physical_expr_common::sort_expr::{LexOrdering, LexRequirement};
+
use futures::stream::Stream;
use futures::{ready, StreamExt};
use hashbrown::raw::RawTable;
+use indexmap::IndexMap;
use log::debug;

/// Window execution plan
1 change: 1 addition & 0 deletions datafusion/sql/Cargo.toml
@@ -46,6 +46,7 @@ arrow-array = { workspace = true }
arrow-schema = { workspace = true }
datafusion-common = { workspace = true, default-features = true }
datafusion-expr = { workspace = true }
+indexmap = { workspace = true }
log = { workspace = true }
recursive = { workspace = true }
regex = { workspace = true }
4 changes: 3 additions & 1 deletion datafusion/sql/src/select.rs
@@ -25,7 +25,7 @@ use crate::utils::{
};

use datafusion_common::tree_node::{TreeNode, TreeNodeRecursion};
-use datafusion_common::{not_impl_err, plan_err, IndexMap, Result};
+use datafusion_common::{not_impl_err, plan_err, Result};
use datafusion_common::{RecursionUnnestOption, UnnestOptions};
use datafusion_expr::expr::{Alias, PlannedReplaceSelectItem, WildcardOptions};
use datafusion_expr::expr_rewriter::{
@@ -38,6 +38,8 @@ use datafusion_expr::{
    qualified_wildcard_with_options, wildcard_with_options, Aggregate, Expr, Filter,
    GroupingSet, LogicalPlan, LogicalPlanBuilder, Partitioning,
};
+
+use indexmap::IndexMap;
use sqlparser::ast::{
    Distinct, Expr as SQLExpr, GroupByExpr, NamedWindowExpr, OrderByExpr,
    WildcardAdditionalOptions, WindowType,
4 changes: 3 additions & 1 deletion datafusion/sql/src/unparser/utils.rs
@@ -20,12 +20,14 @@ use std::{cmp::Ordering, sync::Arc, vec};
use datafusion_common::{
    internal_err,
    tree_node::{Transformed, TransformedResult, TreeNode},
-    Column, DataFusionError, IndexSet, Result, ScalarValue,
+    Column, DataFusionError, Result, ScalarValue,
};
use datafusion_expr::{
    expr, utils::grouping_set_to_exprlist, Aggregate, Expr, LogicalPlan,
    LogicalPlanBuilder, Projection, SortExpr, Unnest, Window,
};
+
+use indexmap::IndexSet;
use sqlparser::ast;

use super::{
4 changes: 3 additions & 1 deletion datafusion/sql/src/utils.rs
@@ -27,14 +27,16 @@ use datafusion_common::tree_node::{
};
use datafusion_common::{
    exec_err, internal_err, plan_err, Column, DFSchemaRef, DataFusionError, HashMap,
-    IndexMap, Result, ScalarValue,
+    Result, ScalarValue,
};
use datafusion_expr::builder::get_struct_unnested_columns;
use datafusion_expr::expr::{Alias, GroupingSet, Unnest, WindowFunction};
use datafusion_expr::utils::{expr_as_column_expr, find_column_exprs};
use datafusion_expr::{
    col, expr_vec_fmt, ColumnUnnestList, Expr, ExprSchemable, LogicalPlan,
};
+
+use indexmap::IndexMap;
use sqlparser::ast::{Ident, Value};

/// Make a best-effort attempt at resolving all columns in the expression tree