OmniSciDB  0fdbebe030
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
anonymous_namespace{RelAlgExecutor.cpp} Namespace Reference

Classes

class  RexUsedInputsVisitor
 

Functions

bool node_is_aggregate (const RelAlgNode *ra)
 
std::unordered_set< PhysicalInput > get_physical_inputs (const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
 
void check_sort_node_source_constraint (const RelSort *sort)
 
const RelAlgNode * get_data_sink (const RelAlgNode *ra_node)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelAggregate *aggregate, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelProject *project, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelTableFunction *table_func, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelFilter *filter, const Catalog_Namespace::Catalog &cat)
 
int table_id_from_ra (const RelAlgNode *ra_node)
 
std::unordered_map< const
RelAlgNode *, int > 
get_input_nest_levels (const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_join_source_used_inputs (const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
 
std::vector< const RelAlgNode * > get_non_join_sequence (const RelAlgNode *ra)
 
void collect_used_input_desc (std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
template<class RA >
std::pair< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor > > > 
get_input_desc_impl (const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
template<class RA >
std::tuple< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor >
>, std::vector
< std::shared_ptr< RexInput > > > 
get_input_desc (const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
size_t get_scalar_sources_size (const RelCompound *compound)
 
size_t get_scalar_sources_size (const RelProject *project)
 
size_t get_scalar_sources_size (const RelTableFunction *table_func)
 
const RexScalar * scalar_at (const size_t i, const RelCompound *compound)
 
const RexScalar * scalar_at (const size_t i, const RelProject *project)
 
const RexScalar * scalar_at (const size_t i, const RelTableFunction *table_func)
 
std::shared_ptr< Analyzer::Expr > set_transient_dict (const std::shared_ptr< Analyzer::Expr > expr)
 
void set_transient_dict_maybe (std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
 
std::shared_ptr< Analyzer::Expr > cast_dict_to_none (const std::shared_ptr< Analyzer::Expr > &input)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources (const RA *ra_node, const RelAlgTranslator &translator, const ::ExecutorType executor_type)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources_for_update (const RA *ra_node, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelCompound *compound, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelAggregate *aggregate, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
QualsConjunctiveForm translate_quals (const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator, const ExecutorType executor_type)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelAggregate *aggregate, const RelAlgTranslator &translator)
 
bool is_count_distinct (const Analyzer::Expr *expr)
 
bool is_agg (const Analyzer::Expr *expr)
 
SQLTypeInfo get_logical_type_for_expr (const Analyzer::Expr &expr)
 
template<class RA >
std::vector< TargetMetaInfo > get_targets_meta (const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
 
bool is_window_execution_unit (const RelAlgExecutionUnit &ra_exe_unit)
 
std::shared_ptr< Analyzer::Expr > transform_to_inner (const Analyzer::Expr *expr)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const ColumnDescriptor *cd, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
std::list< Analyzer::OrderEntry > get_order_entries (const RelSort *sort)
 
size_t get_scan_limit (const RelAlgNode *ra, const size_t limit)
 
bool first_oe_is_desc (const std::list< Analyzer::OrderEntry > &order_entries)
 
size_t groups_approx_upper_bound (const std::vector< InputTableInfo > &table_infos)
 
bool compute_output_buffer_size (const RelAlgExecutionUnit &ra_exe_unit)
 
bool exe_unit_has_quals (const RelAlgExecutionUnit ra_exe_unit)
 
RelAlgExecutionUnit decide_approx_count_distinct_implementation (const RelAlgExecutionUnit &ra_exe_unit_in, const std::vector< InputTableInfo > &table_infos, const Executor *executor, const ExecutorDeviceType device_type_in, std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned)
 
void build_render_targets (RenderInfo &render_info, const std::vector< Analyzer::Expr * > &work_unit_target_exprs, const std::vector< TargetMetaInfo > &targets_meta)
 
bool can_use_bump_allocator (const RelAlgExecutionUnit &ra_exe_unit, const CompilationOptions &co, const ExecutionOptions &eo)
 
JoinType get_join_type (const RelAlgNode *ra)
 
std::unique_ptr< const
RexOperator > 
get_bitwise_equals (const RexScalar *scalar)
 
std::unique_ptr< const
RexOperator > 
get_bitwise_equals_conjunction (const RexScalar *scalar)
 
std::vector< JoinType > left_deep_join_types (const RelLeftDeepInnerJoin *left_deep_join)
 
template<class RA >
std::vector< size_t > do_table_reordering (std::vector< InputDescriptor > &input_descs, std::list< std::shared_ptr< const InputColDescriptor >> &input_col_descs, const JoinQualsPerNestingLevel &left_deep_join_quals, std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const RA *node, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::vector< size_t > get_left_deep_join_input_sizes (const RelLeftDeepInnerJoin *left_deep_join)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
rewrite_quals (const std::list< std::shared_ptr< Analyzer::Expr >> &quals)
 
std::vector< const RexScalar * > rex_to_conjunctive_form (const RexScalar *qual_expr)
 
std::shared_ptr< Analyzer::Expr > build_logical_expression (const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
 
template<class QualsList >
bool list_contains_expression (const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
 
std::shared_ptr< Analyzer::Expr > reverse_logical_distribution (const std::shared_ptr< Analyzer::Expr > &expr)
 
std::vector< std::shared_ptr
< Analyzer::Expr > > 
synthesize_inputs (const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
std::pair< std::vector
< TargetMetaInfo >
, std::vector< std::shared_ptr
< Analyzer::Expr > > > 
get_inputs_meta (const RelFilter *filter, const RelAlgTranslator &translator, const std::vector< std::shared_ptr< RexInput >> &inputs_owned, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 

Function Documentation

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::build_logical_expression ( const std::vector< std::shared_ptr< Analyzer::Expr >> &  factors,
const SQLOps  sql_op 
)

Definition at line 3121 of file RelAlgExecutor.cpp.

References CHECK(), kONE, and Parser::OperExpr::normalize().

Referenced by reverse_logical_distribution().

3123  {
3124  CHECK(!factors.empty());
3125  auto acc = factors.front();
3126  for (size_t i = 1; i < factors.size(); ++i) {
3127  acc = Parser::OperExpr::normalize(sql_op, kONE, acc, factors[i]);
3128  }
3129  return acc;
3130 }
CHECK(cgen_state)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:259
Definition: sqldefs.h:69

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::build_render_targets ( RenderInfo render_info,
const std::vector< Analyzer::Expr * > &  work_unit_target_exprs,
const std::vector< TargetMetaInfo > &  targets_meta 
)

Definition at line 2423 of file RelAlgExecutor.cpp.

References CHECK_EQ, and RenderInfo::targets.

Referenced by RelAlgExecutor::executeWorkUnit().

2425  {
2426  CHECK_EQ(work_unit_target_exprs.size(), targets_meta.size());
2427  render_info.targets.clear();
2428  for (size_t i = 0; i < targets_meta.size(); ++i) {
2429  render_info.targets.emplace_back(std::make_shared<Analyzer::TargetEntry>(
2430  targets_meta[i].get_resname(),
2431  work_unit_target_exprs[i]->get_shared_ptr(),
2432  false));
2433  }
2434 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::vector< std::shared_ptr< Analyzer::TargetEntry > > targets
Definition: RenderInfo.h:37

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::can_use_bump_allocator ( const RelAlgExecutionUnit ra_exe_unit,
const CompilationOptions co,
const ExecutionOptions eo 
)
inline

Definition at line 2436 of file RelAlgExecutor.cpp.

References CompilationOptions::device_type, g_enable_bump_allocator, GPU, SortInfo::order_entries, ExecutionOptions::output_columnar_hint, and RelAlgExecutionUnit::sort_info.

Referenced by RelAlgExecutor::executeWorkUnit().

2438  {
2439  return g_enable_bump_allocator && co.device_type == ExecutorDeviceType::GPU &&
2440  !eo.output_columnar_hint && ra_exe_unit.sort_info.order_entries.empty();
2441 }
bool g_enable_bump_allocator
Definition: Execute.cpp:100
const std::list< Analyzer::OrderEntry > order_entries
const SortInfo sort_info
ExecutorDeviceType device_type

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::cast_dict_to_none ( const std::shared_ptr< Analyzer::Expr > &  input)

Definition at line 1018 of file RelAlgExecutor.cpp.

References kENCODING_DICT, and kTEXT.

Referenced by translate_scalar_sources(), and translate_targets().

1019  {
1020  const auto& input_ti = input->get_type_info();
1021  if (input_ti.is_string() && input_ti.get_compression() == kENCODING_DICT) {
1022  return input->add_cast(SQLTypeInfo(kTEXT, input_ti.get_notnull()));
1023  }
1024  return input;
1025 }
Definition: sqltypes.h:53

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::check_sort_node_source_constraint ( const RelSort sort)
inline

Definition at line 336 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

336  {
337  CHECK_EQ(size_t(1), sort->inputCount());
338  const auto source = sort->getInput(0);
339  if (dynamic_cast<const RelSort*>(source)) {
340  throw std::runtime_error("Sort node not supported as input to another sort");
341  }
342 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::collect_used_input_desc ( std::vector< InputDescriptor > &  input_descs,
const Catalog_Namespace::Catalog cat,
std::unordered_set< std::shared_ptr< const InputColDescriptor >> &  input_col_descs_unique,
const RelAlgNode ra_node,
const std::unordered_set< const RexInput * > &  source_used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 865 of file RelAlgExecutor.cpp.

References get_data_sink(), get_non_join_sequence(), Catalog_Namespace::Catalog::getColumnIdBySpi(), and table_id_from_ra().

Referenced by get_input_desc_impl().

871  {
872  std::unordered_set<InputDescriptor> input_descs_unique(input_descs.begin(),
873  input_descs.end());
874  const auto non_join_src_seq = get_non_join_sequence(get_data_sink(ra_node));
875  std::unordered_map<const RelAlgNode*, int> non_join_to_nest_level;
876  for (const auto node : non_join_src_seq) {
877  non_join_to_nest_level.insert(std::make_pair(node, non_join_to_nest_level.size()));
878  }
879  for (const auto used_input : source_used_inputs) {
880  const auto input_ra = used_input->getSourceNode();
881  const int table_id = table_id_from_ra(input_ra);
882  const auto col_id = used_input->getIndex();
883  auto it = input_to_nest_level.find(input_ra);
884  if (it == input_to_nest_level.end()) {
885  throw std::runtime_error("Bushy joins not supported");
886  }
887  const int input_desc = it->second;
888  input_col_descs_unique.insert(std::make_shared<const InputColDescriptor>(
889  dynamic_cast<const RelScan*>(input_ra)
890  ? cat.getColumnIdBySpi(table_id, col_id + 1)
891  : col_id,
892  table_id,
893  input_desc));
894  }
895 }
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
std::vector< const RelAlgNode * > get_non_join_sequence(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1526

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::compute_output_buffer_size ( const RelAlgExecutionUnit ra_exe_unit)

Determines whether a query needs to compute the size of its output buffer. Returns true for projection queries with no LIMIT or a LIMIT that exceeds the high scan limit threshold (meaning it would be cheaper to compute the number of rows passing or use the bump allocator than allocate the current scan limit per GPU)

Definition at line 2335 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::groupby_exprs, Executor::high_scan_limit, RelAlgExecutionUnit::scan_limit, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

2335  {
2336  for (const auto target_expr : ra_exe_unit.target_exprs) {
2337  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
2338  return false;
2339  }
2340  }
2341  if (ra_exe_unit.groupby_exprs.size() == 1 && !ra_exe_unit.groupby_exprs.front() &&
2342  (!ra_exe_unit.scan_limit || ra_exe_unit.scan_limit > Executor::high_scan_limit)) {
2343  return true;
2344  }
2345  return false;
2346 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
static const size_t high_scan_limit
Definition: Execute.h:407

+ Here is the caller graph for this function:

RelAlgExecutionUnit anonymous_namespace{RelAlgExecutor.cpp}::decide_approx_count_distinct_implementation ( const RelAlgExecutionUnit ra_exe_unit_in,
const std::vector< InputTableInfo > &  table_infos,
const Executor executor,
const ExecutorDeviceType  device_type_in,
std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned 
)

Definition at line 2353 of file RelAlgExecutor.cpp.

References Bitmap, CHECK(), CHECK_GE, g_bigint_count, g_cluster, g_hll_precision_bits, get_agg_type(), get_count_distinct_sub_bitmap_count(), get_target_info(), getExpressionRange(), GPU, hll_size_for_rate(), Integer, kAPPROX_COUNT_DISTINCT, kCOUNT, kENCODING_DICT, kINT, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit(), and RelAlgExecutor::handleOutOfMemoryRetry().

2358  {
2359  RelAlgExecutionUnit ra_exe_unit = ra_exe_unit_in;
2360  for (size_t i = 0; i < ra_exe_unit.target_exprs.size(); ++i) {
2361  const auto target_expr = ra_exe_unit.target_exprs[i];
2362  const auto agg_info = get_target_info(target_expr, g_bigint_count);
2363  if (agg_info.agg_kind != kAPPROX_COUNT_DISTINCT) {
2364  continue;
2365  }
2366  CHECK(dynamic_cast<const Analyzer::AggExpr*>(target_expr));
2367  const auto arg = static_cast<Analyzer::AggExpr*>(target_expr)->get_own_arg();
2368  CHECK(arg);
2369  const auto& arg_ti = arg->get_type_info();
2370  // Avoid calling getExpressionRange for variable length types (string and array),
2371  // it'd trigger an assertion since that API expects to be called only for types
2372  // for which the notion of range is well-defined. A bit of a kludge, but the
2373  // logic to reject these types anyway is at lower levels in the stack and not
2374  // really worth pulling into a separate function for now.
2375  if (!(arg_ti.is_number() || arg_ti.is_boolean() || arg_ti.is_time() ||
2376  (arg_ti.is_string() && arg_ti.get_compression() == kENCODING_DICT))) {
2377  continue;
2378  }
2379  const auto arg_range = getExpressionRange(arg.get(), table_infos, executor);
2380  if (arg_range.getType() != ExpressionRangeType::Integer) {
2381  continue;
2382  }
2383  // When running distributed, the threshold for using the precise implementation
2384  // must be consistent across all leaves, otherwise we could have a mix of precise
2385  // and approximate bitmaps and we cannot aggregate them.
2386  const auto device_type = g_cluster ? ExecutorDeviceType::GPU : device_type_in;
2387  const auto bitmap_sz_bits = arg_range.getIntMax() - arg_range.getIntMin() + 1;
2388  const auto sub_bitmap_count =
2389  get_count_distinct_sub_bitmap_count(bitmap_sz_bits, ra_exe_unit, device_type);
2390  int64_t approx_bitmap_sz_bits{0};
2391  const auto error_rate =
2392  static_cast<Analyzer::AggExpr*>(target_expr)->get_error_rate();
2393  if (error_rate) {
2394  CHECK(error_rate->get_type_info().get_type() == kINT);
2395  CHECK_GE(error_rate->get_constval().intval, 1);
2396  approx_bitmap_sz_bits = hll_size_for_rate(error_rate->get_constval().intval);
2397  } else {
2398  approx_bitmap_sz_bits = g_hll_precision_bits;
2399  }
2400  CountDistinctDescriptor approx_count_distinct_desc{CountDistinctImplType::Bitmap,
2401  arg_range.getIntMin(),
2402  approx_bitmap_sz_bits,
2403  true,
2404  device_type,
2405  sub_bitmap_count};
2406  CountDistinctDescriptor precise_count_distinct_desc{CountDistinctImplType::Bitmap,
2407  arg_range.getIntMin(),
2408  bitmap_sz_bits,
2409  false,
2410  device_type,
2411  sub_bitmap_count};
2412  if (approx_count_distinct_desc.bitmapPaddedSizeBytes() >=
2413  precise_count_distinct_desc.bitmapPaddedSizeBytes()) {
2414  auto precise_count_distinct = makeExpr<Analyzer::AggExpr>(
2415  get_agg_type(kCOUNT, arg.get()), kCOUNT, arg, true, nullptr);
2416  target_exprs_owned.push_back(precise_count_distinct);
2417  ra_exe_unit.target_exprs[i] = precise_count_distinct.get();
2418  }
2419  }
2420  return ra_exe_unit;
2421 }
std::vector< Analyzer::Expr * > target_exprs
int hll_size_for_rate(const int err_percent)
Definition: HyperLogLog.h:115
TargetInfo get_target_info(const PointerType target_expr, const bool bigint_count)
Definition: TargetInfo.h:66
#define CHECK_GE(x, y)
Definition: Logger.h:210
SQLTypeInfo get_agg_type(const SQLAgg agg_kind, const Analyzer::Expr *arg_expr)
int g_hll_precision_bits
size_t get_count_distinct_sub_bitmap_count(const size_t bitmap_sz_bits, const RelAlgExecutionUnit &ra_exe_unit, const ExecutorDeviceType device_type)
CHECK(cgen_state)
bool g_bigint_count
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
Definition: sqldefs.h:76
bool g_cluster
Definition: sqltypes.h:46

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::do_table_reordering ( std::vector< InputDescriptor > &  input_descs,
std::list< std::shared_ptr< const InputColDescriptor >> &  input_col_descs,
const JoinQualsPerNestingLevel left_deep_join_quals,
std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const RA *  node,
const std::vector< InputTableInfo > &  query_infos,
const Executor executor 
)

Definition at line 2967 of file RelAlgExecutor.cpp.

References CHECK(), g_cluster, get_input_desc(), get_input_nest_levels(), get_node_input_permutation(), and table_is_replicated().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

2974  {
2975  if (g_cluster) {
2976  // Disable table reordering in distributed mode. The aggregator does not have enough
2977  // information to break ties
2978  return {};
2979  }
2980  const auto& cat = *executor->getCatalog();
2981  for (const auto& table_info : query_infos) {
2982  if (table_info.table_id < 0) {
2983  continue;
2984  }
2985  const auto td = cat.getMetadataForTable(table_info.table_id);
2986  CHECK(td);
2987  if (table_is_replicated(td)) {
2988  return {};
2989  }
2990  }
2991  const auto input_permutation =
2992  get_node_input_permutation(left_deep_join_quals, query_infos, executor);
2993  input_to_nest_level = get_input_nest_levels(node, input_permutation);
2994  std::tie(input_descs, input_col_descs, std::ignore) =
2995  get_input_desc(node, input_to_nest_level, input_permutation, cat);
2996  return input_permutation;
2997 }
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels(const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
CHECK(cgen_state)
bool table_is_replicated(const TableDescriptor *td)
bool g_cluster
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc(const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::exe_unit_has_quals ( const RelAlgExecutionUnit  ra_exe_unit)
inline

Definition at line 2348 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::join_quals, RelAlgExecutionUnit::quals, and RelAlgExecutionUnit::simple_quals.

Referenced by RelAlgExecutor::executeWorkUnit().

2348  {
2349  return !(ra_exe_unit.quals.empty() && ra_exe_unit.join_quals.empty() &&
2350  ra_exe_unit.simple_quals.empty());
2351 }
const JoinQualsPerNestingLevel join_quals
std::list< std::shared_ptr< Analyzer::Expr > > quals
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::first_oe_is_desc ( const std::list< Analyzer::OrderEntry > &  order_entries)

Definition at line 2140 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2140  {
2141  return !order_entries.empty() && order_entries.front().is_desc;
2142 }

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals ( const RexScalar scalar)

Definition at line 2886 of file RelAlgExecutor.cpp.

References CHECK_EQ, kAND, kBW_EQ, kEQ, kISNULL, kOR, and RexVisitorBase< T >::visit().

Referenced by get_bitwise_equals_conjunction().

2886  {
2887  const auto condition = dynamic_cast<const RexOperator*>(scalar);
2888  if (!condition || condition->getOperator() != kOR || condition->size() != 2) {
2889  return nullptr;
2890  }
2891  const auto equi_join_condition =
2892  dynamic_cast<const RexOperator*>(condition->getOperand(0));
2893  if (!equi_join_condition || equi_join_condition->getOperator() != kEQ) {
2894  return nullptr;
2895  }
2896  const auto both_are_null_condition =
2897  dynamic_cast<const RexOperator*>(condition->getOperand(1));
2898  if (!both_are_null_condition || both_are_null_condition->getOperator() != kAND ||
2899  both_are_null_condition->size() != 2) {
2900  return nullptr;
2901  }
2902  const auto lhs_is_null =
2903  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(0));
2904  const auto rhs_is_null =
2905  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(1));
2906  if (!lhs_is_null || !rhs_is_null || lhs_is_null->getOperator() != kISNULL ||
2907  rhs_is_null->getOperator() != kISNULL) {
2908  return nullptr;
2909  }
2910  CHECK_EQ(size_t(1), lhs_is_null->size());
2911  CHECK_EQ(size_t(1), rhs_is_null->size());
2912  CHECK_EQ(size_t(2), equi_join_condition->size());
2913  const auto eq_lhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(0));
2914  const auto eq_rhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(1));
2915  const auto is_null_lhs = dynamic_cast<const RexInput*>(lhs_is_null->getOperand(0));
2916  const auto is_null_rhs = dynamic_cast<const RexInput*>(rhs_is_null->getOperand(0));
2917  if (!eq_lhs || !eq_rhs || !is_null_lhs || !is_null_rhs) {
2918  return nullptr;
2919  }
2920  std::vector<std::unique_ptr<const RexScalar>> eq_operands;
2921  if (*eq_lhs == *is_null_lhs && *eq_rhs == *is_null_rhs) {
2922  RexDeepCopyVisitor deep_copy_visitor;
2923  auto lhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(0));
2924  auto rhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(1));
2925  eq_operands.emplace_back(lhs_op_copy.release());
2926  eq_operands.emplace_back(rhs_op_copy.release());
2927  return boost::make_unique<const RexOperator>(
2928  kBW_EQ, eq_operands, equi_join_condition->getType());
2929  }
2930  return nullptr;
2931 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
Definition: sqldefs.h:38
Definition: sqldefs.h:30
virtual T visit(const RexScalar *rex_scalar) const
Definition: RexVisitor.h:27
Definition: sqldefs.h:37
Definition: sqldefs.h:31

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals_conjunction ( const RexScalar scalar)

Definition at line 2933 of file RelAlgExecutor.cpp.

References CHECK_GE, get_bitwise_equals(), and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

2934  {
2935  const auto condition = dynamic_cast<const RexOperator*>(scalar);
2936  if (condition && condition->getOperator() == kAND) {
2937  CHECK_GE(condition->size(), size_t(2));
2938  auto acc = get_bitwise_equals(condition->getOperand(0));
2939  if (!acc) {
2940  return nullptr;
2941  }
2942  for (size_t i = 1; i < condition->size(); ++i) {
2943  std::vector<std::unique_ptr<const RexScalar>> and_operands;
2944  and_operands.emplace_back(std::move(acc));
2945  and_operands.emplace_back(get_bitwise_equals_conjunction(condition->getOperand(i)));
2946  acc =
2947  boost::make_unique<const RexOperator>(kAND, and_operands, condition->getType());
2948  }
2949  return acc;
2950  }
2951  return get_bitwise_equals(scalar);
2952 }
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction(const RexScalar *scalar)
#define CHECK_GE(x, y)
Definition: Logger.h:210
Definition: sqldefs.h:37
std::unique_ptr< const RexOperator > get_bitwise_equals(const RexScalar *scalar)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RelAlgNode* anonymous_namespace{RelAlgExecutor.cpp}::get_data_sink ( const RelAlgNode ra_node)

Definition at line 681 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), RelAlgNode::inputCount(), and join().

Referenced by collect_used_input_desc(), get_input_desc_impl(), get_input_nest_levels(), get_inputs_meta(), get_join_source_used_inputs(), get_join_type(), and get_used_inputs().

681  {
682  if (auto join = dynamic_cast<const RelJoin*>(ra_node)) {
683  CHECK_EQ(size_t(2), join->inputCount());
684  return join;
685  }
686  CHECK_EQ(size_t(1), ra_node->inputCount());
687  auto only_src = ra_node->getInput(0);
688  const bool is_join = dynamic_cast<const RelJoin*>(only_src) ||
689  dynamic_cast<const RelLeftDeepInnerJoin*>(only_src);
690  return is_join ? only_src : ra_node;
691 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::tuple<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> >, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc ( const RA *  ra_node,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 958 of file RelAlgExecutor.cpp.

References get_input_desc_impl(), and get_used_inputs().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and do_table_reordering().

961  {
962  std::unordered_set<const RexInput*> used_inputs;
963  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
964  std::tie(used_inputs, used_inputs_owned) = get_used_inputs(ra_node, cat);
965  auto input_desc_pair = get_input_desc_impl(
966  ra_node, used_inputs, input_to_nest_level, input_permutation, cat);
967  return std::make_tuple(
968  input_desc_pair.first, input_desc_pair.second, used_inputs_owned);
969 }
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl(const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs(const RelCompound *compound, const Catalog_Namespace::Catalog &cat)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::pair<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc_impl ( const RA *  ra_node,
const std::unordered_set< const RexInput * > &  used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 900 of file RelAlgExecutor.cpp.

References collect_used_input_desc(), get_data_sink(), get_join_source_used_inputs(), InputDescriptor::getNestLevel(), and table_id_from_ra().

Referenced by get_input_desc().

904  {
905  std::vector<InputDescriptor> input_descs;
906  const auto data_sink_node = get_data_sink(ra_node);
907  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
908  const auto input_node_idx =
909  input_permutation.empty() ? input_idx : input_permutation[input_idx];
910  const auto input_ra = data_sink_node->getInput(input_node_idx);
911  const int table_id = table_id_from_ra(input_ra);
912  input_descs.emplace_back(table_id, input_idx);
913  }
914  std::sort(input_descs.begin(),
915  input_descs.end(),
916  [](const InputDescriptor& lhs, const InputDescriptor& rhs) {
917  return lhs.getNestLevel() < rhs.getNestLevel();
918  });
919  std::unordered_set<std::shared_ptr<const InputColDescriptor>> input_col_descs_unique;
920  collect_used_input_desc(input_descs,
921  cat,
922  input_col_descs_unique,
923  ra_node,
924  used_inputs,
925  input_to_nest_level);
926  std::unordered_set<const RexInput*> join_source_used_inputs;
927  std::vector<std::shared_ptr<RexInput>> join_source_used_inputs_owned;
928  std::tie(join_source_used_inputs, join_source_used_inputs_owned) =
929  get_join_source_used_inputs(ra_node, cat);
930  collect_used_input_desc(input_descs,
931  cat,
932  input_col_descs_unique,
933  ra_node,
934  join_source_used_inputs,
935  input_to_nest_level);
936  std::vector<std::shared_ptr<const InputColDescriptor>> input_col_descs(
937  input_col_descs_unique.begin(), input_col_descs_unique.end());
938 
939  std::sort(
940  input_col_descs.begin(),
941  input_col_descs.end(),
942  [](std::shared_ptr<const InputColDescriptor> const& lhs,
943  std::shared_ptr<const InputColDescriptor> const& rhs) {
944  if (lhs->getScanDesc().getNestLevel() == rhs->getScanDesc().getNestLevel()) {
945  return lhs->getColId() < rhs->getColId();
946  }
947  return lhs->getScanDesc().getNestLevel() < rhs->getScanDesc().getNestLevel();
948  });
949  return {input_descs,
950  std::list<std::shared_ptr<const InputColDescriptor>>(input_col_descs.begin(),
951  input_col_descs.end())};
952 }
void collect_used_input_desc(std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs(const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
int getNestLevel() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_map<const RelAlgNode*, int> anonymous_namespace{RelAlgExecutor.cpp}::get_input_nest_levels ( const RelAlgNode ra_node,
const std::vector< size_t > &  input_permutation 
)

Definition at line 796 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), logger::INFO, and LOG_IF.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and do_table_reordering().

798  {
799  const auto data_sink_node = get_data_sink(ra_node);
800  std::unordered_map<const RelAlgNode*, int> input_to_nest_level;
801  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
802  const auto input_node_idx =
803  input_permutation.empty() ? input_idx : input_permutation[input_idx];
804  const auto input_ra = data_sink_node->getInput(input_node_idx);
805  const auto it_ok = input_to_nest_level.emplace(input_ra, input_idx);
806  CHECK(it_ok.second);
807  LOG_IF(INFO, !input_permutation.empty())
808  << "Assigned input " << input_ra->toString() << " to nest level " << input_idx;
809  }
810  return input_to_nest_level;
811 }
#define LOG_IF(severity, condition)
Definition: Logger.h:287
CHECK(cgen_state)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::vector<TargetMetaInfo>, std::vector<std::shared_ptr<Analyzer::Expr> > > anonymous_namespace{RelAlgExecutor.cpp}::get_inputs_meta ( const RelFilter filter,
const RelAlgTranslator translator,
const std::vector< std::shared_ptr< RexInput >> &  inputs_owned,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3500 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), get_exprs_not_owned(), get_targets_meta(), synthesize_inputs(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createFilterWorkUnit().

3503  {
3504  std::vector<TargetMetaInfo> in_metainfo;
3505  std::vector<std::shared_ptr<Analyzer::Expr>> exprs_owned;
3506  const auto data_sink_node = get_data_sink(filter);
3507  auto input_it = inputs_owned.begin();
3508  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
3509  const auto source = data_sink_node->getInput(nest_level);
3510  const auto scan_source = dynamic_cast<const RelScan*>(source);
3511  if (scan_source) {
3512  CHECK(source->getOutputMetainfo().empty());
3513  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources_owned;
3514  for (size_t i = 0; i < scan_source->size(); ++i, ++input_it) {
3515  scalar_sources_owned.push_back(translator.translateScalarRex(input_it->get()));
3516  }
3517  const auto source_metadata =
3518  get_targets_meta(scan_source, get_exprs_not_owned(scalar_sources_owned));
3519  in_metainfo.insert(
3520  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3521  exprs_owned.insert(
3522  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3523  } else {
3524  const auto& source_metadata = source->getOutputMetainfo();
3525  input_it += source_metadata.size();
3526  in_metainfo.insert(
3527  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3528  const auto scalar_sources_owned = synthesize_inputs(
3529  data_sink_node, nest_level, source_metadata, input_to_nest_level);
3530  exprs_owned.insert(
3531  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3532  }
3533  }
3534  return std::make_pair(in_metainfo, exprs_owned);
3535 }
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs(const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
std::vector< Analyzer::Expr * > get_exprs_not_owned(const std::vector< std::shared_ptr< Analyzer::Expr >> &exprs)
Definition: Execute.h:214
CHECK(cgen_state)
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_join_source_used_inputs ( const RelAlgNode ra_node,
const Catalog_Namespace::Catalog cat 
)

Definition at line 814 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, get_data_sink(), anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelAlgNode::inputCount(), join(), run_benchmark_import::result, and RexVisitorBase< T >::visit().

Referenced by get_input_desc_impl().

815  {
816  const auto data_sink_node = get_data_sink(ra_node);
817  if (auto join = dynamic_cast<const RelJoin*>(data_sink_node)) {
818  CHECK_EQ(join->inputCount(), 2u);
819  const auto condition = join->getCondition();
820  RexUsedInputsVisitor visitor(cat);
821  auto condition_inputs = visitor.visit(condition);
822  std::vector<std::shared_ptr<RexInput>> condition_inputs_owned(
823  visitor.get_inputs_owned());
824  return std::make_pair(condition_inputs, condition_inputs_owned);
825  }
826 
827  if (auto left_deep_join = dynamic_cast<const RelLeftDeepInnerJoin*>(data_sink_node)) {
828  CHECK_GE(left_deep_join->inputCount(), 2u);
829  const auto condition = left_deep_join->getInnerCondition();
830  RexUsedInputsVisitor visitor(cat);
831  auto result = visitor.visit(condition);
832  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
833  ++nesting_level) {
834  const auto outer_condition = left_deep_join->getOuterCondition(nesting_level);
835  if (outer_condition) {
836  const auto outer_result = visitor.visit(outer_condition);
837  result.insert(outer_result.begin(), outer_result.end());
838  }
839  }
840  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
841  return std::make_pair(result, used_inputs_owned);
842  }
843 
844  CHECK_EQ(ra_node->inputCount(), 1u);
845  return std::make_pair(std::unordered_set<const RexInput*>{},
846  std::vector<std::shared_ptr<RexInput>>{});
847 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
#define CHECK_GE(x, y)
Definition: Logger.h:210
const size_t inputCount() const
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

JoinType anonymous_namespace{RelAlgExecutor.cpp}::get_join_type ( const RelAlgNode ra)

Definition at line 2874 of file RelAlgExecutor.cpp.

References get_data_sink(), INNER, INVALID, and join().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

2874  {
2875  auto sink = get_data_sink(ra);
2876  if (auto join = dynamic_cast<const RelJoin*>(sink)) {
2877  return join->getJoinType();
2878  }
2879  if (dynamic_cast<const RelLeftDeepInnerJoin*>(sink)) {
2880  return JoinType::INNER;
2881  }
2882 
2883  return JoinType::INVALID;
2884 }
std::string join(T const &container, std::string const &delim)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::get_left_deep_join_input_sizes ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 2999 of file RelAlgExecutor.cpp.

References get_node_output(), RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3000  {
3001  std::vector<size_t> input_sizes;
3002  for (size_t i = 0; i < left_deep_join->inputCount(); ++i) {
3003  const auto inputs = get_node_output(left_deep_join->getInput(i));
3004  input_sizes.push_back(inputs.size());
3005  }
3006  return input_sizes;
3007 }
const RelAlgNode * getInput(const size_t idx) const
RANodeOutput get_node_output(const RelAlgNode *ra_node)
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

SQLTypeInfo anonymous_namespace{RelAlgExecutor.cpp}::get_logical_type_for_expr ( const Analyzer::Expr expr)
inline

Definition at line 1209 of file RelAlgExecutor.cpp.

References get_logical_type_info(), get_nullable_logical_type_info(), Analyzer::Expr::get_type_info(), is_agg(), is_count_distinct(), and kBIGINT.

Referenced by get_targets_meta().

1209  {
1210  if (is_count_distinct(&expr)) {
1211  return SQLTypeInfo(kBIGINT, false);
1212  } else if (is_agg(&expr)) {
1214  }
1215  return get_logical_type_info(expr.get_type_info());
1216 }
bool is_agg(const Analyzer::Expr *expr)
SQLTypeInfo get_nullable_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:811
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:796
bool is_count_distinct(const Analyzer::Expr *expr)
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:78

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<const RelAlgNode*> anonymous_namespace{RelAlgExecutor.cpp}::get_non_join_sequence ( const RelAlgNode ra)

Definition at line 849 of file RelAlgExecutor.cpp.

References CHECK_EQ, and join().

Referenced by collect_used_input_desc().

849  {
850  std::vector<const RelAlgNode*> seq;
851  for (auto join = dynamic_cast<const RelJoin*>(ra); join;
852  join = static_cast<const RelJoin*>(join->getInput(0))) {
853  CHECK_EQ(size_t(2), join->inputCount());
854  seq.emplace_back(join->getInput(1));
855  auto lhs = join->getInput(0);
856  if (!dynamic_cast<const RelJoin*>(lhs)) {
857  seq.emplace_back(lhs);
858  break;
859  }
860  }
861  std::reverse(seq.begin(), seq.end());
862  return seq;
863 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<Analyzer::OrderEntry> anonymous_namespace{RelAlgExecutor.cpp}::get_order_entries ( const RelSort sort)

Definition at line 2120 of file RelAlgExecutor.cpp.

References RelSort::collationCount(), Descending, First, RelSort::getCollation(), and run_benchmark_import::result.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2120  {
2121  std::list<Analyzer::OrderEntry> result;
2122  for (size_t i = 0; i < sort->collationCount(); ++i) {
2123  const auto sort_field = sort->getCollation(i);
2124  result.emplace_back(sort_field.getField() + 1,
2125  sort_field.getSortDir() == SortDirection::Descending,
2126  sort_field.getNullsPosition() == NullSortedPosition::First);
2127  }
2128  return result;
2129 }
SortField getCollation(const size_t i) const
size_t collationCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_set<PhysicalInput> anonymous_namespace{RelAlgExecutor.cpp}::get_physical_inputs ( const Catalog_Namespace::Catalog cat,
const RelAlgNode ra 
)

Definition at line 58 of file RelAlgExecutor.cpp.

References get_physical_inputs(), and Catalog_Namespace::Catalog::getColumnIdBySpi().

60  {
61  auto phys_inputs = get_physical_inputs(ra);
62  std::unordered_set<PhysicalInput> phys_inputs2;
63  for (auto& phi : phys_inputs) {
64  phys_inputs2.insert(
65  PhysicalInput{cat.getColumnIdBySpi(phi.table_id, phi.col_id), phi.table_id});
66  }
67  return phys_inputs2;
68 }
std::unordered_set< PhysicalInput > get_physical_inputs(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1526

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelCompound compound)

Definition at line 971 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSourcesSize().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

971  {
972  return compound->getScalarSourcesSize();
973 }
const size_t getScalarSourcesSize() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelProject project)

Definition at line 975 of file RelAlgExecutor.cpp.

References RelProject::size().

975  {
976  return project->size();
977 }
size_t size() const override

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelTableFunction table_func)

Definition at line 979 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputsSize().

979  {
980  return table_func->getTableFuncInputsSize();
981 }
size_t getTableFuncInputsSize() const

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scan_limit ( const RelAlgNode ra,
const size_t  limit 
)

Definition at line 2131 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit().

2131  {
2132  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
2133  if (aggregate) {
2134  return 0;
2135  }
2136  const auto compound = dynamic_cast<const RelCompound*>(ra);
2137  return (compound && compound->isAggregate()) ? 0 : limit;
2138 }

+ Here is the caller graph for this function:

template<class RA >
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RA *  ra_node,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1219 of file RelAlgExecutor.cpp.

References CHECK(), and get_logical_type_for_expr().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and get_inputs_meta().

1221  {
1222  std::vector<TargetMetaInfo> targets_meta;
1223  for (size_t i = 0; i < ra_node->size(); ++i) {
1224  CHECK(target_exprs[i]);
1225  // TODO(alex): remove the count distinct type fixup.
1226  targets_meta.emplace_back(ra_node->getFieldName(i),
1227  get_logical_type_for_expr(*target_exprs[i]),
1228  target_exprs[i]->get_type_info());
1229  }
1230  return targets_meta;
1231 }
CHECK(cgen_state)
SQLTypeInfo get_logical_type_for_expr(const Analyzer::Expr &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelCompound compound,
const Catalog_Namespace::Catalog cat 
)

Definition at line 694 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelCompound::getFilterExpr(), RelCompound::getScalarSource(), RelCompound::getScalarSourcesSize(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc().

694  {
695  RexUsedInputsVisitor visitor(cat);
696  const auto filter_expr = compound->getFilterExpr();
697  std::unordered_set<const RexInput*> used_inputs =
698  filter_expr ? visitor.visit(filter_expr) : std::unordered_set<const RexInput*>{};
699  const auto sources_size = compound->getScalarSourcesSize();
700  for (size_t i = 0; i < sources_size; ++i) {
701  const auto source_inputs = visitor.visit(compound->getScalarSource(i));
702  used_inputs.insert(source_inputs.begin(), source_inputs.end());
703  }
704  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
705  return std::make_pair(used_inputs, used_inputs_owned);
706 }
const RexScalar * getFilterExpr() const
const size_t getScalarSourcesSize() const
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelAggregate aggregate,
const Catalog_Namespace::Catalog cat 
)

Definition at line 709 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, RelAggregate::getAggExprs(), RelAggregate::getGroupByCount(), RelAlgNode::getInput(), RelAlgNode::getOutputMetainfo(), and RelAlgNode::inputCount().

709  {
710  CHECK_EQ(size_t(1), aggregate->inputCount());
711  std::unordered_set<const RexInput*> used_inputs;
712  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
713  const auto source = aggregate->getInput(0);
714  const auto& in_metainfo = source->getOutputMetainfo();
715  const auto group_count = aggregate->getGroupByCount();
716  CHECK_GE(in_metainfo.size(), group_count);
717  for (size_t i = 0; i < group_count; ++i) {
718  auto synthesized_used_input = new RexInput(source, i);
719  used_inputs_owned.emplace_back(synthesized_used_input);
720  used_inputs.insert(synthesized_used_input);
721  }
722  for (const auto& agg_expr : aggregate->getAggExprs()) {
723  for (size_t i = 0; i < agg_expr->size(); ++i) {
724  const auto operand_idx = agg_expr->getOperand(i);
725  CHECK_GE(in_metainfo.size(), static_cast<size_t>(operand_idx));
726  auto synthesized_used_input = new RexInput(source, operand_idx);
727  used_inputs_owned.emplace_back(synthesized_used_input);
728  used_inputs.insert(synthesized_used_input);
729  }
730  }
731  return std::make_pair(used_inputs, used_inputs_owned);
732 }
const size_t getGroupByCount() const
#define CHECK_EQ(x, y)
Definition: Logger.h:205
#define CHECK_GE(x, y)
Definition: Logger.h:210
const RelAlgNode * getInput(const size_t idx) const
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
const size_t inputCount() const
const std::vector< TargetMetaInfo > & getOutputMetainfo() const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelProject project,
const Catalog_Namespace::Catalog cat 
)

Definition at line 735 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelProject::getProjectAt(), RelProject::size(), and RexVisitorBase< T >::visit().

735  {
736  RexUsedInputsVisitor visitor(cat);
737  std::unordered_set<const RexInput*> used_inputs;
738  for (size_t i = 0; i < project->size(); ++i) {
739  const auto proj_inputs = visitor.visit(project->getProjectAt(i));
740  used_inputs.insert(proj_inputs.begin(), proj_inputs.end());
741  }
742  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
743  return std::make_pair(used_inputs, used_inputs_owned);
744 }
size_t size() const override
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelTableFunction table_func,
const Catalog_Namespace::Catalog cat 
)

Definition at line 747 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelTableFunction::getTableFuncInputAt(), RelTableFunction::getTableFuncInputsSize(), and RexVisitorBase< T >::visit().

748  {
749  RexUsedInputsVisitor visitor(cat);
750  std::unordered_set<const RexInput*> used_inputs;
751  for (size_t i = 0; i < table_func->getTableFuncInputsSize(); ++i) {
752  const auto table_func_inputs = visitor.visit(table_func->getTableFuncInputAt(i));
753  used_inputs.insert(table_func_inputs.begin(), table_func_inputs.end());
754  }
755  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
756  return std::make_pair(used_inputs, used_inputs_owned);
757 }
size_t getTableFuncInputsSize() const
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelFilter filter,
const Catalog_Namespace::Catalog cat 
)

Definition at line 760 of file RelAlgExecutor.cpp.

References CHECK(), and get_data_sink().

760  {
761  std::unordered_set<const RexInput*> used_inputs;
762  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
763  const auto data_sink_node = get_data_sink(filter);
764  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
765  const auto source = data_sink_node->getInput(nest_level);
766  const auto scan_source = dynamic_cast<const RelScan*>(source);
767  if (scan_source) {
768  CHECK(source->getOutputMetainfo().empty());
769  for (size_t i = 0; i < scan_source->size(); ++i) {
770  auto synthesized_used_input = new RexInput(scan_source, i);
771  used_inputs_owned.emplace_back(synthesized_used_input);
772  used_inputs.insert(synthesized_used_input);
773  }
774  } else {
775  const auto& partial_in_metadata = source->getOutputMetainfo();
776  for (size_t i = 0; i < partial_in_metadata.size(); ++i) {
777  auto synthesized_used_input = new RexInput(source, i);
778  used_inputs_owned.emplace_back(synthesized_used_input);
779  used_inputs.insert(synthesized_used_input);
780  }
781  }
782  }
783  return std::make_pair(used_inputs, used_inputs_owned);
784 }
CHECK(cgen_state)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::groups_approx_upper_bound ( const std::vector< InputTableInfo > &  table_infos)

Upper bound estimation for the number of groups. Not strictly correct and not tight, but if the tables involved are really small we shouldn't waste time doing the NDV estimation. We don't account for cross-joins and / or group by unnested array, which is the reason this estimation isn't entirely reliable.

Definition at line 2317 of file RelAlgExecutor.cpp.

References CHECK().

Referenced by RelAlgExecutor::executeWorkUnit().

2317  {
2318  CHECK(!table_infos.empty());
2319  const auto& first_table = table_infos.front();
2320  size_t max_num_groups = first_table.info.getNumTuplesUpperBound();
2321  for (const auto& table_info : table_infos) {
2322  if (table_info.info.getNumTuplesUpperBound() > max_num_groups) {
2323  max_num_groups = table_info.info.getNumTuplesUpperBound();
2324  }
2325  }
2326  return std::max(max_num_groups, size_t(1));
2327 }
CHECK(cgen_state)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const std::string &  columnName,
const SQLTypeInfo columnType,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 1766 of file RelAlgExecutor.cpp.

References CHECK(), logger::ERROR, SQLTypeInfo::get_comp_param(), Analyzer::Constant::get_constval(), Analyzer::Constant::get_is_null(), Catalog_Namespace::Catalog::getMetadataForDict(), inline_fixed_encoding_null_val(), LOG, and Datum::stringval.

Referenced by RelAlgExecutor::executeSimpleInsert(), and insert_one_dict_str().

1770  {
1771  if (col_cv->get_is_null()) {
1772  *col_data = inline_fixed_encoding_null_val(columnType);
1773  } else {
1774  const int dict_id = columnType.get_comp_param();
1775  const auto col_datum = col_cv->get_constval();
1776  const auto& str = *col_datum.stringval;
1777  const auto dd = catalog.getMetadataForDict(dict_id);
1778  CHECK(dd && dd->stringDict);
1779  int32_t str_id = dd->stringDict->getOrAdd(str);
1780  if (!dd->dictIsTemp) {
1781  const auto checkpoint_ok = dd->stringDict->checkpoint();
1782  if (!checkpoint_ok) {
1783  throw std::runtime_error("Failed to checkpoint dictionary for column " +
1784  columnName);
1785  }
1786  }
1787  const bool invalid = str_id > max_valid_int_value<T>();
1788  if (invalid || str_id == inline_int_null_value<int32_t>()) {
1789  if (invalid) {
1790  LOG(ERROR) << "Could not encode string: " << str
1791  << ", the encoded value doesn't fit in " << sizeof(T) * 8
1792  << " bits. Will store NULL instead.";
1793  }
1794  str_id = inline_fixed_encoding_null_val(columnType);
1795  }
1796  *col_data = str_id;
1797  }
1798  return *col_data;
1799 }
#define LOG(tag)
Definition: Logger.h:188
bool get_is_null() const
Definition: Analyzer.h:328
CHECK(cgen_state)
const DictDescriptor * getMetadataForDict(int dict_ref, bool loadDict=true) const
Definition: Catalog.cpp:1444
std::string * stringval
Definition: sqltypes.h:132
Datum get_constval() const
Definition: Analyzer.h:329
HOST DEVICE int get_comp_param() const
Definition: sqltypes.h:257
int64_t inline_fixed_encoding_null_val(const SQL_TYPE_INFO &ti)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const ColumnDescriptor cd,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 1802 of file RelAlgExecutor.cpp.

References ColumnDescriptor::columnName, ColumnDescriptor::columnType, and insert_one_dict_str().

1805  {
1806  return insert_one_dict_str(col_data, cd->columnName, cd->columnType, col_cv, catalog);
1807 }
int64_t insert_one_dict_str(T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
SQLTypeInfo columnType
std::string columnName

+ Here is the call graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_agg ( const Analyzer::Expr expr)

Definition at line 1197 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_aggtype(), kAVG, kMAX, kMIN, and kSUM.

Referenced by anonymous_namespace{RelAlgDagBuilder.cpp}::create_compound(), RelAlgExecutor::executeWorkUnit(), Executor::executeWorkUnitImpl(), get_logical_type_for_expr(), and ResultSet::getSingleSlotTargetBitmap().

1197  {
1198  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1199  if (agg_expr && agg_expr->get_contains_agg()) {
1200  auto agg_type = agg_expr->get_aggtype();
1201  if (agg_type == SQLAgg::kMIN || agg_type == SQLAgg::kMAX ||
1202  agg_type == SQLAgg::kSUM || agg_type == SQLAgg::kAVG) {
1203  return true;
1204  }
1205  }
1206  return false;
1207 }
Definition: sqldefs.h:73
Definition: sqldefs.h:75
SQLAgg get_aggtype() const
Definition: Analyzer.h:1044
Definition: sqldefs.h:74
Definition: sqldefs.h:72

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_count_distinct ( const Analyzer::Expr expr)

Definition at line 1192 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_is_distinct().

Referenced by get_logical_type_for_expr().

1192  {
1193  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1194  return agg_expr && agg_expr->get_is_distinct();
1195 }
bool get_is_distinct() const
Definition: Analyzer.h:1047

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_window_execution_unit ( const RelAlgExecutionUnit ra_exe_unit)

Definition at line 1459 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1459  {
1460  return std::any_of(ra_exe_unit.target_exprs.begin(),
1461  ra_exe_unit.target_exprs.end(),
1462  [](const Analyzer::Expr* expr) {
1463  return dynamic_cast<const Analyzer::WindowFunction*>(expr);
1464  });
1465 }
std::vector< Analyzer::Expr * > target_exprs

+ Here is the caller graph for this function:

std::vector<JoinType> anonymous_namespace{RelAlgExecutor.cpp}::left_deep_join_types ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 2954 of file RelAlgExecutor.cpp.

References CHECK_GE, RelLeftDeepInnerJoin::getOuterCondition(), INNER, RelAlgNode::inputCount(), and LEFT.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::translateLeftDeepJoinFilter().

2954  {
2955  CHECK_GE(left_deep_join->inputCount(), size_t(2));
2956  std::vector<JoinType> join_types(left_deep_join->inputCount() - 1, JoinType::INNER);
2957  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
2958  ++nesting_level) {
2959  if (left_deep_join->getOuterCondition(nesting_level)) {
2960  join_types[nesting_level - 1] = JoinType::LEFT;
2961  }
2962  }
2963  return join_types;
2964 }
const RexScalar * getOuterCondition(const size_t nesting_level) const
#define CHECK_GE(x, y)
Definition: Logger.h:210
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class QualsList >
bool anonymous_namespace{RelAlgExecutor.cpp}::list_contains_expression ( const QualsList &  haystack,
const std::shared_ptr< Analyzer::Expr > &  needle 
)

Definition at line 3133 of file RelAlgExecutor.cpp.

Referenced by reverse_logical_distribution().

3134  {
3135  for (const auto& qual : haystack) {
3136  if (*qual == *needle) {
3137  return true;
3138  }
3139  }
3140  return false;
3141 }

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::node_is_aggregate ( const RelAlgNode *  ra)

Definition at line 52 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

52  {
53  const auto compound = dynamic_cast<const RelCompound*>(ra);
54  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
55  return ((compound && compound->isAggregate()) || aggregate);
56 }

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::reverse_logical_distribution ( const std::shared_ptr< Analyzer::Expr > &  expr)

Definition at line 3146 of file RelAlgExecutor.cpp.

References build_logical_expression(), CHECK_GE, kAND, kONE, kOR, list_contains_expression(), Parser::OperExpr::normalize(), qual_to_conjunctive_form(), and qual_to_disjunctive_form().

Referenced by RelAlgExecutor::makeJoinQuals().

3147  {
3148  const auto expr_terms = qual_to_disjunctive_form(expr);
3149  CHECK_GE(expr_terms.size(), size_t(1));
3150  const auto& first_term = expr_terms.front();
3151  const auto first_term_factors = qual_to_conjunctive_form(first_term);
3152  std::vector<std::shared_ptr<Analyzer::Expr>> common_factors;
3153  // First, collect the conjunctive components common to all the disjunctive components.
3154  // Don't do it for simple qualifiers, we only care about expensive or join qualifiers.
3155  for (const auto& first_term_factor : first_term_factors.quals) {
3156  bool is_common =
3157  expr_terms.size() > 1; // Only report common factors for disjunction.
3158  for (size_t i = 1; i < expr_terms.size(); ++i) {
3159  const auto crt_term_factors = qual_to_conjunctive_form(expr_terms[i]);
3160  if (!list_contains_expression(crt_term_factors.quals, first_term_factor)) {
3161  is_common = false;
3162  break;
3163  }
3164  }
3165  if (is_common) {
3166  common_factors.push_back(first_term_factor);
3167  }
3168  }
3169  if (common_factors.empty()) {
3170  return expr;
3171  }
3172  // Now that the common expressions are known, collect the remaining expressions.
3173  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_terms;
3174  for (const auto& term : expr_terms) {
3175  const auto term_cf = qual_to_conjunctive_form(term);
3176  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_quals(
3177  term_cf.simple_quals.begin(), term_cf.simple_quals.end());
3178  for (const auto& qual : term_cf.quals) {
3179  if (!list_contains_expression(common_factors, qual)) {
3180  remaining_quals.push_back(qual);
3181  }
3182  }
3183  if (!remaining_quals.empty()) {
3184  remaining_terms.push_back(build_logical_expression(remaining_quals, kAND));
3185  }
3186  }
3187  // Reconstruct the expression with the transformation applied.
3188  const auto common_expr = build_logical_expression(common_factors, kAND);
3189  if (remaining_terms.empty()) {
3190  return common_expr;
3191  }
3192  const auto remaining_expr = build_logical_expression(remaining_terms, kOR);
3193  return Parser::OperExpr::normalize(kAND, kONE, common_expr, remaining_expr);
3194 }
Definition: sqldefs.h:38
#define CHECK_GE(x, y)
Definition: Logger.h:210
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
bool list_contains_expression(const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
Definition: sqldefs.h:37
std::shared_ptr< Analyzer::Expr > build_logical_expression(const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:259
Definition: sqldefs.h:69
std::vector< std::shared_ptr< Analyzer::Expr > > qual_to_disjunctive_form(const std::shared_ptr< Analyzer::Expr > &qual_expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::rewrite_quals ( const std::list< std::shared_ptr< Analyzer::Expr >> &  quals)

Definition at line 3009 of file RelAlgExecutor.cpp.

References rewrite_expr().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

3010  {
3011  std::list<std::shared_ptr<Analyzer::Expr>> rewritten_quals;
3012  for (const auto& qual : quals) {
3013  const auto rewritten_qual = rewrite_expr(qual.get());
3014  rewritten_quals.push_back(rewritten_qual ? rewritten_qual : qual);
3015  }
3016  return rewritten_quals;
3017 }
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<const RexScalar*> anonymous_namespace{RelAlgExecutor.cpp}::rex_to_conjunctive_form ( const RexScalar *  qual_expr)

Definition at line 3106 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3106  {
3107  CHECK(qual_expr);
3108  const auto bin_oper = dynamic_cast<const RexOperator*>(qual_expr);
3109  if (!bin_oper || bin_oper->getOperator() != kAND) {
3110  return {qual_expr};
3111  }
3112  CHECK_GE(bin_oper->size(), size_t(2));
3113  auto lhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(0));
3114  for (size_t i = 1; i < bin_oper->size(); ++i) {
3115  const auto rhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(i));
3116  lhs_cf.insert(lhs_cf.end(), rhs_cf.begin(), rhs_cf.end());
3117  }
3118  return lhs_cf;
3119 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::vector< const RexScalar * > rex_to_conjunctive_form(const RexScalar *qual_expr)
CHECK(cgen_state)
Definition: sqldefs.h:37

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelCompound *  compound 
)

Definition at line 983 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSource().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

983  {
984  return compound->getScalarSource(i);
985 }
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelProject *  project 
)

Definition at line 987 of file RelAlgExecutor.cpp.

References RelProject::getProjectAt().

987  {
988  return project->getProjectAt(i);
989 }
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelTableFunction *  table_func 
)

Definition at line 991 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputAt().

991  {
992  return table_func->getTableFuncInputAt(i);
993 }
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict ( const std::shared_ptr< Analyzer::Expr >  expr)

Definition at line 995 of file RelAlgExecutor.cpp.

References kENCODING_DICT, kENCODING_NONE, and TRANSIENT_DICT_ID.

Referenced by set_transient_dict_maybe(), translate_groupby_exprs(), and translate_targets().

996  {
997  const auto& ti = expr->get_type_info();
998  if (!ti.is_string() || ti.get_compression() != kENCODING_NONE) {
999  return expr;
1000  }
1001  auto transient_dict_ti = ti;
1002  transient_dict_ti.set_compression(kENCODING_DICT);
1003  transient_dict_ti.set_comp_param(TRANSIENT_DICT_ID);
1004  transient_dict_ti.set_fixed_size();
1005  return expr->add_cast(transient_dict_ti);
1006 }
#define TRANSIENT_DICT_ID
Definition: sqltypes.h:187

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict_maybe ( std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::shared_ptr< Analyzer::Expr > &  expr 
)

Definition at line 1008 of file RelAlgExecutor.cpp.

References fold_expr(), and set_transient_dict().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1010  {
1011  try {
1012  scalar_sources.push_back(set_transient_dict(fold_expr(expr.get())));
1013  } catch (...) {
1014  scalar_sources.push_back(fold_expr(expr.get()));
1015  }
1016 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::synthesize_inputs ( const RelAlgNode *  ra_node,
const size_t  nest_level,
const std::vector< TargetMetaInfo > &  in_metainfo,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3273 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, CHECK_LE, RelAlgNode::getInput(), RelAlgNode::inputCount(), and table_id_from_ra().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and get_inputs_meta().

3277  {
3278  CHECK_LE(size_t(1), ra_node->inputCount());
3279  CHECK_GE(size_t(2), ra_node->inputCount());
3280  const auto input = ra_node->getInput(nest_level);
3281  const auto it_rte_idx = input_to_nest_level.find(input);
3282  CHECK(it_rte_idx != input_to_nest_level.end());
3283  const int rte_idx = it_rte_idx->second;
3284  const int table_id = table_id_from_ra(input);
3285  std::vector<std::shared_ptr<Analyzer::Expr>> inputs;
3286  const auto scan_ra = dynamic_cast<const RelScan*>(input);
3287  int input_idx = 0;
3288  for (const auto& input_meta : in_metainfo) {
3289  inputs.push_back(
3290  std::make_shared<Analyzer::ColumnVar>(input_meta.get_type_info(),
3291  table_id,
3292  scan_ra ? input_idx + 1 : input_idx,
3293  rte_idx));
3294  ++input_idx;
3295  }
3296  return inputs;
3297 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
CHECK(cgen_state)
const RelAlgNode * getInput(const size_t idx) const
#define CHECK_LE(x, y)
Definition: Logger.h:208
int table_id_from_ra(const RelAlgNode *ra_node)
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

int anonymous_namespace{RelAlgExecutor.cpp}::table_id_from_ra ( const RelAlgNode *  ra_node)

Definition at line 786 of file RelAlgExecutor.cpp.

References CHECK(), RelAlgNode::getId(), and RelScan::getTableDescriptor().

Referenced by collect_used_input_desc(), get_input_desc_impl(), and synthesize_inputs().

786  {
787  const auto scan_ra = dynamic_cast<const RelScan*>(ra_node);
788  if (scan_ra) {
789  const auto td = scan_ra->getTableDescriptor();
790  CHECK(td);
791  return td->tableId;
792  }
793  return -ra_node->getId();
794 }
CHECK(cgen_state)
unsigned getId() const
const TableDescriptor * getTableDescriptor() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::transform_to_inner ( const Analyzer::Expr *  expr)

Definition at line 1549 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::computeWindow().

1549  {
1550  const auto tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr);
1551  if (tuple) {
1552  std::vector<std::shared_ptr<Analyzer::Expr>> transformed_tuple;
1553  for (const auto& element : tuple->getTuple()) {
1554  transformed_tuple.push_back(transform_to_inner(element.get()));
1555  }
1556  return makeExpr<Analyzer::ExpressionTuple>(transformed_tuple);
1557  }
1558  const auto col = dynamic_cast<const Analyzer::ColumnVar*>(expr);
1559  if (!col) {
1560  throw std::runtime_error("Only columns supported in the window partition for now");
1561  }
1562  return makeExpr<Analyzer::ColumnVar>(
1563  col->get_type_info(), col->get_table_id(), col->get_column_id(), 1);
1564 }
std::shared_ptr< Analyzer::Expr > transform_to_inner(const Analyzer::Expr *expr)

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelCompound *  compound,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1088 of file RelAlgExecutor.cpp.

References RelCompound::getGroupByCount(), RelCompound::isAggregate(), and set_transient_dict().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1090  {
1091  if (!compound->isAggregate()) {
1092  return {nullptr};
1093  }
1094  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1095  for (size_t group_idx = 0; group_idx < compound->getGroupByCount(); ++group_idx) {
1096  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1097  }
1098  return groupby_exprs;
1099 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
const size_t getGroupByCount() const
bool isAggregate() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelAggregate *  aggregate,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1101 of file RelAlgExecutor.cpp.

References RelAggregate::getGroupByCount(), and set_transient_dict().

1103  {
1104  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1105  for (size_t group_idx = 0; group_idx < aggregate->getGroupByCount(); ++group_idx) {
1106  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1107  }
1108  return groupby_exprs;
1109 }
const size_t getGroupByCount() const
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)

+ Here is the call graph for this function:

QualsConjunctiveForm anonymous_namespace{RelAlgExecutor.cpp}::translate_quals ( const RelCompound *  compound,
const RelAlgTranslator &  translator 
)

Definition at line 1111 of file RelAlgExecutor.cpp.

References fold_expr(), RelCompound::getFilterExpr(), qual_to_conjunctive_form(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

1112  {
1113  const auto filter_rex = compound->getFilterExpr();
1114  const auto filter_expr =
1115  filter_rex ? translator.translateScalarRex(filter_rex) : nullptr;
1116  return filter_expr ? qual_to_conjunctive_form(fold_expr(filter_expr.get()))
1117                      : QualsConjunctiveForm{};
1118 }
const RexScalar * getFilterExpr() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources ( const RA *  ra_node,
const RelAlgTranslator &  translator,
const ::ExecutorType  executor_type 
)

Definition at line 1028 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), fold_expr(), get_scalar_sources_size(), Native, rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::createTableFunctionWorkUnit().

1031  {
1032  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1033  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
1034  const auto scalar_rex = scalar_at(i, ra_node);
1035  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1036  // RexRef are synthetic scalars we append at the end of the real ones
1037  // for the sake of taking memory ownership, no real work needed here.
1038  continue;
1039  }
1040 
1041  const auto scalar_expr =
1042  rewrite_array_elements(translator.translateScalarRex(scalar_rex).get());
1043  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1044  if (executor_type == ExecutorType::Native) {
1045  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1046  } else {
1047  scalar_sources.push_back(cast_dict_to_none(fold_expr(rewritten_expr.get())));
1048  }
1049  }
1050 
1051  return scalar_sources;
1052 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources_for_update ( const RA *  ra_node,
const RelAlgTranslator &  translator,
int32_t  tableId,
const Catalog_Namespace::Catalog &  cat,
const ColumnNameList &  colNames,
size_t  starting_projection_column_idx 
)

Definition at line 1055 of file RelAlgExecutor.cpp.

References get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

1061  {
1062  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1063  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
1064  const auto scalar_rex = scalar_at(i, ra_node);
1065  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1066  // RexRef are synthetic scalars we append at the end of the real ones
1067  // for the sake of taking memory ownership, no real work needed here.
1068  continue;
1069  }
1070 
1071  std::shared_ptr<Analyzer::Expr> translated_expr;
1072  if (i >= starting_projection_column_idx && i < get_scalar_sources_size(ra_node) - 1) {
1073  translated_expr = cast_to_column_type(translator.translateScalarRex(scalar_rex),
1074  tableId,
1075  cat,
1076  colNames[i - starting_projection_column_idx]);
1077  } else {
1078  translated_expr = translator.translateScalarRex(scalar_rex);
1079  }
1080  const auto scalar_expr = rewrite_array_elements(translated_expr.get());
1081  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1082  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1083  }
1084 
1085  return scalar_sources;
1086 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound *  compound,
const RelAlgTranslator &  translator,
const ExecutorType  executor_type 
)

Definition at line 1120 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), CHECK(), CHECK_GE, CHECK_LE, fold_expr(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, Native, rewrite_expr(), set_transient_dict(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1126  {
1127  std::vector<Analyzer::Expr*> target_exprs;
1128  for (size_t i = 0; i < compound->size(); ++i) {
1129  const auto target_rex = compound->getTargetExpr(i);
1130  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
1131  std::shared_ptr<Analyzer::Expr> target_expr;
1132  if (target_rex_agg) {
1133  target_expr =
1134  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
1135  } else {
1136  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
1137  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
1138  if (target_rex_ref) {
1139  const auto ref_idx = target_rex_ref->getIndex();
1140  CHECK_GE(ref_idx, size_t(1));
1141  CHECK_LE(ref_idx, groupby_exprs.size());
1142  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
1143  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
1144  } else {
1145  target_expr = translator.translateScalarRex(target_rex_scalar);
1146  auto rewritten_expr = rewrite_expr(target_expr.get());
1147  target_expr = fold_expr(rewritten_expr.get());
1148  if (executor_type == ExecutorType::Native) {
1149  try {
1150  target_expr = set_transient_dict(target_expr);
1151  } catch (...) {
1152  // noop
1153  }
1154  } else {
1155  target_expr = cast_dict_to_none(target_expr);
1156  }
1157  }
1158  }
1159  CHECK(target_expr);
1160  target_exprs_owned.push_back(target_expr);
1161  target_exprs.push_back(target_expr.get());
1162  }
1163  return target_exprs;
1164 }
const Rex * getTargetExpr(const size_t i) const
size_t getIndex() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t size() const override
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1582
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
CHECK(cgen_state)
#define CHECK_LE(x, y)
Definition: Logger.h:208
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelAggregate *  aggregate,
const RelAlgTranslator &  translator 
)

Definition at line 1166 of file RelAlgExecutor.cpp.

References CHECK(), fold_expr(), RelAggregate::getAggExprs(), Analyzer::Var::kGROUPBY, RelAlgTranslator::translateAggregateRex(), and var_ref().

1171  {
1172  std::vector<Analyzer::Expr*> target_exprs;
1173  size_t group_key_idx = 0;
1174  for (const auto& groupby_expr : groupby_exprs) {
1175  auto target_expr =
1176  var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, group_key_idx++);
1177  target_exprs_owned.push_back(target_expr);
1178  target_exprs.push_back(target_expr.get());
1179  }
1180 
1181  for (const auto& target_rex_agg : aggregate->getAggExprs()) {
1182  auto target_expr =
1183  RelAlgTranslator::translateAggregateRex(target_rex_agg.get(), scalar_sources);
1184  CHECK(target_expr);
1185  target_expr = fold_expr(target_expr.get());
1186  target_exprs_owned.push_back(target_expr);
1187  target_exprs.push_back(target_expr.get());
1188  }
1189  return target_exprs;
1190 }
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1582
CHECK(cgen_state)
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function: