OmniSciDB  dfae7c3b14
anonymous_namespace{RelAlgExecutor.cpp} Namespace Reference

Classes

struct  ExecutorMutexHolder
 
class  RexUsedInputsVisitor
 

Functions

bool node_is_aggregate (const RelAlgNode *ra)
 
std::unordered_set< PhysicalInput > get_physical_inputs (const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
 
void check_sort_node_source_constraint (const RelSort *sort)
 
const RelAlgNode * get_data_sink (const RelAlgNode *ra_node)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelAggregate *aggregate, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelProject *project, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelTableFunction *table_func, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelFilter *filter, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelLogicalUnion *logical_union, const Catalog_Namespace::Catalog &)
 
int table_id_from_ra (const RelAlgNode *ra_node)
 
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels (const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs (const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
 
void collect_used_input_desc (std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput *> &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
template<class RA >
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl (const RA *ra_node, const std::unordered_set< const RexInput *> &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
template<class RA >
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc (const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
size_t get_scalar_sources_size (const RelCompound *compound)
 
size_t get_scalar_sources_size (const RelProject *project)
 
size_t get_scalar_sources_size (const RelTableFunction *table_func)
 
const RexScalar * scalar_at (const size_t i, const RelCompound *compound)
 
const RexScalar * scalar_at (const size_t i, const RelProject *project)
 
const RexScalar * scalar_at (const size_t i, const RelTableFunction *table_func)
 
std::shared_ptr< Analyzer::Expr > set_transient_dict (const std::shared_ptr< Analyzer::Expr > expr)
 
void set_transient_dict_maybe (std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
 
std::shared_ptr< Analyzer::Expr > cast_dict_to_none (const std::shared_ptr< Analyzer::Expr > &input)
 
template<class RA >
std::vector< std::shared_ptr< Analyzer::Expr > > translate_scalar_sources (const RA *ra_node, const RelAlgTranslator &translator, const ::ExecutorType executor_type)
 
template<class RA >
std::vector< std::shared_ptr< Analyzer::Expr > > translate_scalar_sources_for_update (const RA *ra_node, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
std::list< std::shared_ptr< Analyzer::Expr > > translate_groupby_exprs (const RelCompound *compound, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
std::list< std::shared_ptr< Analyzer::Expr > > translate_groupby_exprs (const RelAggregate *aggregate, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
QualsConjunctiveForm translate_quals (const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator, const ExecutorType executor_type)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelAggregate *aggregate, const RelAlgTranslator &translator)
 
bool is_count_distinct (const Analyzer::Expr *expr)
 
bool is_agg (const Analyzer::Expr *expr)
 
SQLTypeInfo get_logical_type_for_expr (const Analyzer::Expr &expr)
 
template<class RA >
std::vector< TargetMetaInfo > get_targets_meta (const RA *ra_node, const std::vector< Analyzer::Expr *> &target_exprs)
 
template<>
std::vector< TargetMetaInfo > get_targets_meta (const RelFilter *filter, const std::vector< Analyzer::Expr *> &target_exprs)
 
bool is_window_execution_unit (const RelAlgExecutionUnit &ra_exe_unit)
 
std::shared_ptr< Analyzer::Expr > transform_to_inner (const Analyzer::Expr *expr)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const ColumnDescriptor *cd, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
std::list< Analyzer::OrderEntry > get_order_entries (const RelSort *sort)
 
size_t get_scan_limit (const RelAlgNode *ra, const size_t limit)
 
bool first_oe_is_desc (const std::list< Analyzer::OrderEntry > &order_entries)
 
size_t groups_approx_upper_bound (const std::vector< InputTableInfo > &table_infos)
 
bool compute_output_buffer_size (const RelAlgExecutionUnit &ra_exe_unit)
 
bool exe_unit_has_quals (const RelAlgExecutionUnit ra_exe_unit)
 
RelAlgExecutionUnit decide_approx_count_distinct_implementation (const RelAlgExecutionUnit &ra_exe_unit_in, const std::vector< InputTableInfo > &table_infos, const Executor *executor, const ExecutorDeviceType device_type_in, std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned)
 
void build_render_targets (RenderInfo &render_info, const std::vector< Analyzer::Expr *> &work_unit_target_exprs, const std::vector< TargetMetaInfo > &targets_meta)
 
bool can_use_bump_allocator (const RelAlgExecutionUnit &ra_exe_unit, const CompilationOptions &co, const ExecutionOptions &eo)
 
JoinType get_join_type (const RelAlgNode *ra)
 
std::unique_ptr< const RexOperator > get_bitwise_equals (const RexScalar *scalar)
 
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction (const RexScalar *scalar)
 
std::vector< JoinType > left_deep_join_types (const RelLeftDeepInnerJoin *left_deep_join)
 
template<class RA >
std::vector< size_t > do_table_reordering (std::vector< InputDescriptor > &input_descs, std::list< std::shared_ptr< const InputColDescriptor >> &input_col_descs, const JoinQualsPerNestingLevel &left_deep_join_quals, std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const RA *node, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::vector< size_t > get_left_deep_join_input_sizes (const RelLeftDeepInnerJoin *left_deep_join)
 
std::list< std::shared_ptr< Analyzer::Expr > > rewrite_quals (const std::list< std::shared_ptr< Analyzer::Expr >> &quals)
 
std::vector< const RexScalar * > rex_to_conjunctive_form (const RexScalar *qual_expr)
 
std::shared_ptr< Analyzer::Expr > build_logical_expression (const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
 
template<class QualsList >
bool list_contains_expression (const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
 
std::shared_ptr< Analyzer::Expr > reverse_logical_distribution (const std::shared_ptr< Analyzer::Expr > &expr)
 
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs (const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
std::vector< std::shared_ptr< Analyzer::Expr > > target_exprs_for_union (RelAlgNode const *input_node)
 
std::pair< std::vector< TargetMetaInfo >, std::vector< std::shared_ptr< Analyzer::Expr > > > get_inputs_meta (const RelFilter *filter, const RelAlgTranslator &translator, const std::vector< std::shared_ptr< RexInput >> &inputs_owned, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 

Function Documentation

◆ build_logical_expression()

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::build_logical_expression ( const std::vector< std::shared_ptr< Analyzer::Expr >> &  factors,
const SQLOps  sql_op 
)

Definition at line 3378 of file RelAlgExecutor.cpp.

References CHECK, kONE, and Parser::OperExpr::normalize().

Referenced by reverse_logical_distribution().

3380  {
3381  CHECK(!factors.empty());
3382  auto acc = factors.front();
3383  for (size_t i = 1; i < factors.size(); ++i) {
3384  acc = Parser::OperExpr::normalize(sql_op, kONE, acc, factors[i]);
3385  }
3386  return acc;
3387 }
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:270
Definition: sqldefs.h:69
#define CHECK(condition)
Definition: Logger.h:197
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ build_render_targets()

void anonymous_namespace{RelAlgExecutor.cpp}::build_render_targets ( RenderInfo & render_info,
const std::vector< Analyzer::Expr *> &  work_unit_target_exprs,
const std::vector< TargetMetaInfo > &  targets_meta 
)

Definition at line 2660 of file RelAlgExecutor.cpp.

References CHECK_EQ, and RenderInfo::targets.

Referenced by RelAlgExecutor::executeWorkUnit().

2662  {
2663  CHECK_EQ(work_unit_target_exprs.size(), targets_meta.size());
2664  render_info.targets.clear();
2665  for (size_t i = 0; i < targets_meta.size(); ++i) {
2666  render_info.targets.emplace_back(std::make_shared<Analyzer::TargetEntry>(
2667  targets_meta[i].get_resname(),
2668  work_unit_target_exprs[i]->get_shared_ptr(),
2669  false));
2670  }
2671 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::vector< std::shared_ptr< Analyzer::TargetEntry > > targets
Definition: RenderInfo.h:37
+ Here is the caller graph for this function:

◆ can_use_bump_allocator()

bool anonymous_namespace{RelAlgExecutor.cpp}::can_use_bump_allocator ( const RelAlgExecutionUnit & ra_exe_unit,
const CompilationOptions & co,
const ExecutionOptions & eo 
)
inline

Definition at line 2673 of file RelAlgExecutor.cpp.

References CompilationOptions::device_type, g_enable_bump_allocator, GPU, SortInfo::order_entries, ExecutionOptions::output_columnar_hint, and RelAlgExecutionUnit::sort_info.

Referenced by RelAlgExecutor::executeWorkUnit().

2675  {
2677  !eo.output_columnar_hint && ra_exe_unit.sort_info.order_entries.empty();
2678 }
const std::list< Analyzer::OrderEntry > order_entries
const SortInfo sort_info
bool g_enable_bump_allocator
Definition: Execute.cpp:104
ExecutorDeviceType device_type
+ Here is the caller graph for this function:

◆ cast_dict_to_none()

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::cast_dict_to_none ( const std::shared_ptr< Analyzer::Expr > &  input)

Definition at line 1143 of file RelAlgExecutor.cpp.

References kENCODING_DICT, and kTEXT.

Referenced by translate_scalar_sources(), and translate_targets().

1144  {
1145  const auto& input_ti = input->get_type_info();
1146  if (input_ti.is_string() && input_ti.get_compression() == kENCODING_DICT) {
1147  return input->add_cast(SQLTypeInfo(kTEXT, input_ti.get_notnull()));
1148  }
1149  return input;
1150 }
Definition: sqltypes.h:54
+ Here is the caller graph for this function:

◆ check_sort_node_source_constraint()

void anonymous_namespace{RelAlgExecutor.cpp}::check_sort_node_source_constraint ( const RelSort * sort)
inline

Definition at line 404 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

404  {
405  CHECK_EQ(size_t(1), sort->inputCount());
406  const auto source = sort->getInput(0);
407  if (dynamic_cast<const RelSort*>(source)) {
408  throw std::runtime_error("Sort node not supported as input to another sort");
409  }
410 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
const size_t inputCount() const
const RelAlgNode * getInput(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ collect_used_input_desc()

void anonymous_namespace{RelAlgExecutor.cpp}::collect_used_input_desc ( std::vector< InputDescriptor > &  input_descs,
const Catalog_Namespace::Catalog & cat,
std::unordered_set< std::shared_ptr< const InputColDescriptor >> &  input_col_descs_unique,
const RelAlgNode * ra_node,
const std::unordered_set< const RexInput *> &  source_used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 991 of file RelAlgExecutor.cpp.

References Catalog_Namespace::Catalog::getColumnIdBySpi(), table_id_from_ra(), RelAlgNode::toString(), and VLOG.

Referenced by get_input_desc_impl().

997  {
998  VLOG(3) << "ra_node=" << ra_node->toString()
999  << " input_col_descs_unique.size()=" << input_col_descs_unique.size()
1000  << " source_used_inputs.size()=" << source_used_inputs.size();
1001  for (const auto used_input : source_used_inputs) {
1002  const auto input_ra = used_input->getSourceNode();
1003  const int table_id = table_id_from_ra(input_ra);
1004  const auto col_id = used_input->getIndex();
1005  auto it = input_to_nest_level.find(input_ra);
1006  if (it != input_to_nest_level.end()) {
1007  const int input_desc = it->second;
1008  input_col_descs_unique.insert(std::make_shared<const InputColDescriptor>(
1009  dynamic_cast<const RelScan*>(input_ra)
1010  ? cat.getColumnIdBySpi(table_id, col_id + 1)
1011  : col_id,
1012  table_id,
1013  input_desc));
1014  } else if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
1015  throw std::runtime_error("Bushy joins not supported");
1016  }
1017  }
1018 }
int table_id_from_ra(const RelAlgNode *ra_node)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1542
virtual std::string toString() const =0
#define VLOG(n)
Definition: Logger.h:291
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ compute_output_buffer_size()

bool anonymous_namespace{RelAlgExecutor.cpp}::compute_output_buffer_size ( const RelAlgExecutionUnit & ra_exe_unit)

Determines whether a query needs to compute the size of its output buffer. Returns true for projection queries with no LIMIT or a LIMIT that exceeds the high scan limit threshold (meaning it would be cheaper to compute the number of rows passing or use the bump allocator than allocate the current scan limit per GPU)

Definition at line 2572 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::groupby_exprs, Executor::high_scan_limit, RelAlgExecutionUnit::scan_limit, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

2572  {
2573  for (const auto target_expr : ra_exe_unit.target_exprs) {
2574  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
2575  return false;
2576  }
2577  }
2578  if (ra_exe_unit.groupby_exprs.size() == 1 && !ra_exe_unit.groupby_exprs.front() &&
2579  (!ra_exe_unit.scan_limit || ra_exe_unit.scan_limit > Executor::high_scan_limit)) {
2580  return true;
2581  }
2582  return false;
2583 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
static const size_t high_scan_limit
Definition: Execute.h:408
+ Here is the caller graph for this function:

◆ decide_approx_count_distinct_implementation()

RelAlgExecutionUnit anonymous_namespace{RelAlgExecutor.cpp}::decide_approx_count_distinct_implementation ( const RelAlgExecutionUnit & ra_exe_unit_in,
const std::vector< InputTableInfo > &  table_infos,
const Executor * executor,
const ExecutorDeviceType  device_type_in,
std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned 
)

Definition at line 2590 of file RelAlgExecutor.cpp.

References Bitmap, CHECK, CHECK_GE, g_bigint_count, g_cluster, g_hll_precision_bits, get_agg_type(), get_count_distinct_sub_bitmap_count(), get_target_info(), getExpressionRange(), GPU, hll_size_for_rate(), Integer, kAPPROX_COUNT_DISTINCT, kCOUNT, kENCODING_DICT, kINT, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit(), and RelAlgExecutor::handleOutOfMemoryRetry().

2595  {
2596  RelAlgExecutionUnit ra_exe_unit = ra_exe_unit_in;
2597  for (size_t i = 0; i < ra_exe_unit.target_exprs.size(); ++i) {
2598  const auto target_expr = ra_exe_unit.target_exprs[i];
2599  const auto agg_info = get_target_info(target_expr, g_bigint_count);
2600  if (agg_info.agg_kind != kAPPROX_COUNT_DISTINCT) {
2601  continue;
2602  }
2603  CHECK(dynamic_cast<const Analyzer::AggExpr*>(target_expr));
2604  const auto arg = static_cast<Analyzer::AggExpr*>(target_expr)->get_own_arg();
2605  CHECK(arg);
2606  const auto& arg_ti = arg->get_type_info();
2607  // Avoid calling getExpressionRange for variable length types (string and array),
2608  // it'd trigger an assertion since that API expects to be called only for types
2609  // for which the notion of range is well-defined. A bit of a kludge, but the
2610  // logic to reject these types anyway is at lower levels in the stack and not
2611  // really worth pulling into a separate function for now.
2612  if (!(arg_ti.is_number() || arg_ti.is_boolean() || arg_ti.is_time() ||
2613  (arg_ti.is_string() && arg_ti.get_compression() == kENCODING_DICT))) {
2614  continue;
2615  }
2616  const auto arg_range = getExpressionRange(arg.get(), table_infos, executor);
2617  if (arg_range.getType() != ExpressionRangeType::Integer) {
2618  continue;
2619  }
2620  // When running distributed, the threshold for using the precise implementation
2621  // must be consistent across all leaves, otherwise we could have a mix of precise
2622  // and approximate bitmaps and we cannot aggregate them.
2623  const auto device_type = g_cluster ? ExecutorDeviceType::GPU : device_type_in;
2624  const auto bitmap_sz_bits = arg_range.getIntMax() - arg_range.getIntMin() + 1;
2625  const auto sub_bitmap_count =
2626  get_count_distinct_sub_bitmap_count(bitmap_sz_bits, ra_exe_unit, device_type);
2627  int64_t approx_bitmap_sz_bits{0};
2628  const auto error_rate =
2629  static_cast<Analyzer::AggExpr*>(target_expr)->get_error_rate();
2630  if (error_rate) {
2631  CHECK(error_rate->get_type_info().get_type() == kINT);
2632  CHECK_GE(error_rate->get_constval().intval, 1);
2633  approx_bitmap_sz_bits = hll_size_for_rate(error_rate->get_constval().intval);
2634  } else {
2635  approx_bitmap_sz_bits = g_hll_precision_bits;
2636  }
2637  CountDistinctDescriptor approx_count_distinct_desc{CountDistinctImplType::Bitmap,
2638  arg_range.getIntMin(),
2639  approx_bitmap_sz_bits,
2640  true,
2641  device_type,
2642  sub_bitmap_count};
2643  CountDistinctDescriptor precise_count_distinct_desc{CountDistinctImplType::Bitmap,
2644  arg_range.getIntMin(),
2645  bitmap_sz_bits,
2646  false,
2647  device_type,
2648  sub_bitmap_count};
2649  if (approx_count_distinct_desc.bitmapPaddedSizeBytes() >=
2650  precise_count_distinct_desc.bitmapPaddedSizeBytes()) {
2651  auto precise_count_distinct = makeExpr<Analyzer::AggExpr>(
2652  get_agg_type(kCOUNT, arg.get()), kCOUNT, arg, true, nullptr);
2653  target_exprs_owned.push_back(precise_count_distinct);
2654  ra_exe_unit.target_exprs[i] = precise_count_distinct.get();
2655  }
2656  }
2657  return ra_exe_unit;
2658 }
std::vector< Analyzer::Expr * > target_exprs
int hll_size_for_rate(const int err_percent)
Definition: HyperLogLog.h:115
TargetInfo get_target_info(const PointerType target_expr, const bool bigint_count)
Definition: TargetInfo.h:78
#define CHECK_GE(x, y)
Definition: Logger.h:210
SQLTypeInfo get_agg_type(const SQLAgg agg_kind, const Analyzer::Expr *arg_expr)
int g_hll_precision_bits
size_t get_count_distinct_sub_bitmap_count(const size_t bitmap_sz_bits, const RelAlgExecutionUnit &ra_exe_unit, const ExecutorDeviceType device_type)
bool g_bigint_count
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
Definition: sqldefs.h:76
#define CHECK(condition)
Definition: Logger.h:197
bool g_cluster
Definition: sqltypes.h:47
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ do_table_reordering()

template<class RA >
std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::do_table_reordering ( std::vector< InputDescriptor > &  input_descs,
std::list< std::shared_ptr< const InputColDescriptor >> &  input_col_descs,
const JoinQualsPerNestingLevel left_deep_join_quals,
std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const RA *  node,
const std::vector< InputTableInfo > &  query_infos,
const Executor * executor 
)

Definition at line 3224 of file RelAlgExecutor.cpp.

References cat(), CHECK, g_cluster, get_input_desc(), get_input_nest_levels(), get_node_input_permutation(), and table_is_replicated().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3231  {
3232  if (g_cluster) {
3233  // Disable table reordering in distributed mode. The aggregator does not have enough
3234  // information to break ties
3235  return {};
3236  }
3237  const auto& cat = *executor->getCatalog();
3238  for (const auto& table_info : query_infos) {
3239  if (table_info.table_id < 0) {
3240  continue;
3241  }
3242  const auto td = cat.getMetadataForTable(table_info.table_id);
3243  CHECK(td);
3244  if (table_is_replicated(td)) {
3245  return {};
3246  }
3247  }
3248  const auto input_permutation =
3249  get_node_input_permutation(left_deep_join_quals, query_infos, executor);
3250  input_to_nest_level = get_input_nest_levels(node, input_permutation);
3251  std::tie(input_descs, input_col_descs, std::ignore) =
3252  get_input_desc(node, input_to_nest_level, input_permutation, cat);
3253  return input_permutation;
3254 }
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels(const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
std::string cat(Ts &&... args)
bool table_is_replicated(const TableDescriptor *td)
#define CHECK(condition)
Definition: Logger.h:197
bool g_cluster
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc(const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ exe_unit_has_quals()

bool anonymous_namespace{RelAlgExecutor.cpp}::exe_unit_has_quals ( const RelAlgExecutionUnit  ra_exe_unit)
inline

Definition at line 2585 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::join_quals, RelAlgExecutionUnit::quals, and RelAlgExecutionUnit::simple_quals.

Referenced by RelAlgExecutor::executeWorkUnit().

2585  {
2586  return !(ra_exe_unit.quals.empty() && ra_exe_unit.join_quals.empty() &&
2587  ra_exe_unit.simple_quals.empty());
2588 }
const JoinQualsPerNestingLevel join_quals
std::list< std::shared_ptr< Analyzer::Expr > > quals
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals
+ Here is the caller graph for this function:

◆ first_oe_is_desc()

bool anonymous_namespace{RelAlgExecutor.cpp}::first_oe_is_desc ( const std::list< Analyzer::OrderEntry > &  order_entries)

Definition at line 2357 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2357  {
2358  return !order_entries.empty() && order_entries.front().is_desc;
2359 }
+ Here is the caller graph for this function:

◆ get_bitwise_equals()

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals ( const RexScalar * scalar)

Definition at line 3143 of file RelAlgExecutor.cpp.

References CHECK_EQ, kAND, kBW_EQ, kEQ, kISNULL, kOR, and RexVisitorBase< T >::visit().

Referenced by get_bitwise_equals_conjunction().

3143  {
3144  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3145  if (!condition || condition->getOperator() != kOR || condition->size() != 2) {
3146  return nullptr;
3147  }
3148  const auto equi_join_condition =
3149  dynamic_cast<const RexOperator*>(condition->getOperand(0));
3150  if (!equi_join_condition || equi_join_condition->getOperator() != kEQ) {
3151  return nullptr;
3152  }
3153  const auto both_are_null_condition =
3154  dynamic_cast<const RexOperator*>(condition->getOperand(1));
3155  if (!both_are_null_condition || both_are_null_condition->getOperator() != kAND ||
3156  both_are_null_condition->size() != 2) {
3157  return nullptr;
3158  }
3159  const auto lhs_is_null =
3160  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(0));
3161  const auto rhs_is_null =
3162  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(1));
3163  if (!lhs_is_null || !rhs_is_null || lhs_is_null->getOperator() != kISNULL ||
3164  rhs_is_null->getOperator() != kISNULL) {
3165  return nullptr;
3166  }
3167  CHECK_EQ(size_t(1), lhs_is_null->size());
3168  CHECK_EQ(size_t(1), rhs_is_null->size());
3169  CHECK_EQ(size_t(2), equi_join_condition->size());
3170  const auto eq_lhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(0));
3171  const auto eq_rhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(1));
3172  const auto is_null_lhs = dynamic_cast<const RexInput*>(lhs_is_null->getOperand(0));
3173  const auto is_null_rhs = dynamic_cast<const RexInput*>(rhs_is_null->getOperand(0));
3174  if (!eq_lhs || !eq_rhs || !is_null_lhs || !is_null_rhs) {
3175  return nullptr;
3176  }
3177  std::vector<std::unique_ptr<const RexScalar>> eq_operands;
3178  if (*eq_lhs == *is_null_lhs && *eq_rhs == *is_null_rhs) {
3179  RexDeepCopyVisitor deep_copy_visitor;
3180  auto lhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(0));
3181  auto rhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(1));
3182  eq_operands.emplace_back(lhs_op_copy.release());
3183  eq_operands.emplace_back(rhs_op_copy.release());
3184  return boost::make_unique<const RexOperator>(
3185  kBW_EQ, eq_operands, equi_join_condition->getType());
3186  }
3187  return nullptr;
3188 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
Definition: sqldefs.h:38
Definition: sqldefs.h:30
Definition: sqldefs.h:37
virtual T visit(const RexScalar *rex_scalar) const
Definition: RexVisitor.h:27
Definition: sqldefs.h:31
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_bitwise_equals_conjunction()

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals_conjunction ( const RexScalar * scalar)

Definition at line 3190 of file RelAlgExecutor.cpp.

References CHECK_GE, get_bitwise_equals(), and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3191  {
3192  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3193  if (condition && condition->getOperator() == kAND) {
3194  CHECK_GE(condition->size(), size_t(2));
3195  auto acc = get_bitwise_equals(condition->getOperand(0));
3196  if (!acc) {
3197  return nullptr;
3198  }
3199  for (size_t i = 1; i < condition->size(); ++i) {
3200  std::vector<std::unique_ptr<const RexScalar>> and_operands;
3201  and_operands.emplace_back(std::move(acc));
3202  and_operands.emplace_back(get_bitwise_equals_conjunction(condition->getOperand(i)));
3203  acc =
3204  boost::make_unique<const RexOperator>(kAND, and_operands, condition->getType());
3205  }
3206  return acc;
3207  }
3208  return get_bitwise_equals(scalar);
3209 }
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction(const RexScalar *scalar)
#define CHECK_GE(x, y)
Definition: Logger.h:210
Definition: sqldefs.h:37
std::unique_ptr< const RexOperator > get_bitwise_equals(const RexScalar *scalar)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_data_sink()

const RelAlgNode* anonymous_namespace{RelAlgExecutor.cpp}::get_data_sink ( const RelAlgNode * ra_node)

Definition at line 791 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), RelAlgNode::inputCount(), and join().

Referenced by get_input_desc_impl(), get_input_nest_levels(), get_inputs_meta(), get_join_source_used_inputs(), get_join_type(), and get_used_inputs().

791  {
792  if (auto table_func = dynamic_cast<const RelTableFunction*>(ra_node)) {
793  return table_func;
794  }
795  if (auto join = dynamic_cast<const RelJoin*>(ra_node)) {
796  CHECK_EQ(size_t(2), join->inputCount());
797  return join;
798  }
799  if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
800  CHECK_EQ(size_t(1), ra_node->inputCount());
801  }
802  auto only_src = ra_node->getInput(0);
803  const bool is_join = dynamic_cast<const RelJoin*>(only_src) ||
804  dynamic_cast<const RelLeftDeepInnerJoin*>(only_src);
805  return is_join ? only_src : ra_node;
806 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
const size_t inputCount() const
const RelAlgNode * getInput(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_input_desc()

template<class RA >
std::tuple<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> >, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc ( const RA *  ra_node,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 1082 of file RelAlgExecutor.cpp.

References get_input_desc_impl(), get_used_inputs(), and VLOG.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

// Gathers the RexInputs used by ra_node and resolves them into table-level
// input descriptors and column descriptors; the returned vector keeps the
// synthesized RexInputs alive for the caller.
1085  {
1086  std::unordered_set<const RexInput*> used_inputs;
1087  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
1088  std::tie(used_inputs, used_inputs_owned) = get_used_inputs(ra_node, cat);
1089  VLOG(3) << "used_inputs.size() = " << used_inputs.size();
1090  auto input_desc_pair = get_input_desc_impl(
1091  ra_node, used_inputs, input_to_nest_level, input_permutation, cat);
1092  return std::make_tuple(
1093  input_desc_pair.first, input_desc_pair.second, used_inputs_owned);
1094 }
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl(const RA *ra_node, const std::unordered_set< const RexInput *> &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs(const RelLogicalUnion *logical_union, const Catalog_Namespace::Catalog &)
#define VLOG(n)
Definition: Logger.h:291
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_input_desc_impl()

template<class RA >
std::pair<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc_impl ( const RA *  ra_node,
const std::unordered_set< const RexInput *> &  used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 1023 of file RelAlgExecutor.cpp.

References collect_used_input_desc(), get_data_sink(), get_join_source_used_inputs(), InputDescriptor::getNestLevel(), and table_id_from_ra().

Referenced by get_input_desc().

1027  {
1028  std::vector<InputDescriptor> input_descs;
1029  const auto data_sink_node = get_data_sink(ra_node);
1030  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
1031  const auto input_node_idx =
1032  input_permutation.empty() ? input_idx : input_permutation[input_idx];
1033  auto input_ra = data_sink_node->getInput(input_node_idx);
1034  const int table_id = table_id_from_ra(input_ra);
1035  input_descs.emplace_back(table_id, input_idx);
1036  }
1037  std::sort(input_descs.begin(),
1038  input_descs.end(),
1039  [](const InputDescriptor& lhs, const InputDescriptor& rhs) {
1040  return lhs.getNestLevel() < rhs.getNestLevel();
1041  });
1042  std::unordered_set<std::shared_ptr<const InputColDescriptor>> input_col_descs_unique;
1043  collect_used_input_desc(input_descs,
1044  cat,
1045  input_col_descs_unique, // modified
1046  ra_node,
1047  used_inputs,
1048  input_to_nest_level);
1049  std::unordered_set<const RexInput*> join_source_used_inputs;
1050  std::vector<std::shared_ptr<RexInput>> join_source_used_inputs_owned;
1051  std::tie(join_source_used_inputs, join_source_used_inputs_owned) =
1052  get_join_source_used_inputs(ra_node, cat);
1053  collect_used_input_desc(input_descs,
1054  cat,
1055  input_col_descs_unique, // modified
1056  ra_node,
1057  join_source_used_inputs,
1058  input_to_nest_level);
1059  std::vector<std::shared_ptr<const InputColDescriptor>> input_col_descs(
1060  input_col_descs_unique.begin(), input_col_descs_unique.end());
1061 
1062  std::sort(input_col_descs.begin(),
1063  input_col_descs.end(),
1064  [](std::shared_ptr<const InputColDescriptor> const& lhs,
1065  std::shared_ptr<const InputColDescriptor> const& rhs) {
1066  return std::make_tuple(lhs->getScanDesc().getNestLevel(),
1067  lhs->getColId(),
1068  lhs->getScanDesc().getTableId()) <
1069  std::make_tuple(rhs->getScanDesc().getNestLevel(),
1070  rhs->getColId(),
1071  rhs->getScanDesc().getTableId());
1072  });
1073  return {input_descs,
1074  std::list<std::shared_ptr<const InputColDescriptor>>(input_col_descs.begin(),
1075  input_col_descs.end())};
1076 }
int getNestLevel() const
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs(const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
int table_id_from_ra(const RelAlgNode *ra_node)
void collect_used_input_desc(std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput *> &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_input_nest_levels()

std::unordered_map<const RelAlgNode*, int> anonymous_namespace{RelAlgExecutor.cpp}::get_input_nest_levels ( const RelAlgNode ra_node,
const std::vector< size_t > &  input_permutation 
)

Definition at line 928 of file RelAlgExecutor.cpp.

References CHECK, get_data_sink(), logger::INFO, and LOG_IF.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

// Assigns a nest level to each input of ra_node's data sink. Inputs may be
// visited in permuted order (from join table reordering); the nest level is
// the visit position, except for UNION where every input shares level 0.
930  {
931  const auto data_sink_node = get_data_sink(ra_node);
932  std::unordered_map<const RelAlgNode*, int> input_to_nest_level;
933  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
934  const auto input_node_idx =
935  input_permutation.empty() ? input_idx : input_permutation[input_idx];
936  const auto input_ra = data_sink_node->getInput(input_node_idx);
937  // Having a non-zero mapped value (input_idx) results in the query being interpreted
938  // as a JOIN within CodeGenerator::codegenColVar() due to rte_idx being set to the
939  // mapped value (input_idx) which originates here. This would be incorrect for UNION.
940  size_t const idx = dynamic_cast<const RelLogicalUnion*>(ra_node) ? 0 : input_idx;
941  const auto it_ok = input_to_nest_level.emplace(input_ra, idx);
942  CHECK(it_ok.second);
943  LOG_IF(INFO, !input_permutation.empty())
944  << "Assigned input " << input_ra->toString() << " to nest level " << idx;
945  }
946  return input_to_nest_level;
947 }
#define LOG_IF(severity, condition)
Definition: Logger.h:287
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_inputs_meta()

std::pair<std::vector<TargetMetaInfo>, std::vector<std::shared_ptr<Analyzer::Expr> > > anonymous_namespace{RelAlgExecutor.cpp}::get_inputs_meta ( const RelFilter filter,
const RelAlgTranslator translator,
const std::vector< std::shared_ptr< RexInput >> &  inputs_owned,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3867 of file RelAlgExecutor.cpp.

References CHECK, get_data_sink(), get_exprs_not_owned(), get_targets_meta(), synthesize_inputs(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createFilterWorkUnit().

// Collects per-column metadata and input expressions for a filter's sources.
// Scan sources are translated from the owned RexInputs; non-scan sources reuse
// their already-computed output metadata and get synthesized inputs instead.
// NOTE(review): input_it is advanced in lock-step with the columns of each
// source, so inputs_owned must be ordered source-by-source — confirm at caller.
3870  {
3871  std::vector<TargetMetaInfo> in_metainfo;
3872  std::vector<std::shared_ptr<Analyzer::Expr>> exprs_owned;
3873  const auto data_sink_node = get_data_sink(filter);
3874  auto input_it = inputs_owned.begin();
3875  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
3876  const auto source = data_sink_node->getInput(nest_level);
3877  const auto scan_source = dynamic_cast<const RelScan*>(source);
3878  if (scan_source) {
3879  CHECK(source->getOutputMetainfo().empty());
3880  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources_owned;
3881  for (size_t i = 0; i < scan_source->size(); ++i, ++input_it) {
3882  scalar_sources_owned.push_back(translator.translateScalarRex(input_it->get()));
3883  }
3884  const auto source_metadata =
3885  get_targets_meta(scan_source, get_exprs_not_owned(scalar_sources_owned));
3886  in_metainfo.insert(
3887  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3888  exprs_owned.insert(
3889  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3890  } else {
3891  const auto& source_metadata = source->getOutputMetainfo();
3892  input_it += source_metadata.size();  // skip this source's inputs; they are synthesized below
3893  in_metainfo.insert(
3894  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3895  const auto scalar_sources_owned = synthesize_inputs(
3896  data_sink_node, nest_level, source_metadata, input_to_nest_level);
3897  exprs_owned.insert(
3898  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3899  }
3900  }
3901  return std::make_pair(in_metainfo, exprs_owned);
3902 }
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs(const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::vector< Analyzer::Expr * > get_exprs_not_owned(const std::vector< std::shared_ptr< Analyzer::Expr >> &exprs)
Definition: Execute.h:226
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
std::vector< TargetMetaInfo > get_targets_meta(const RelFilter *filter, const std::vector< Analyzer::Expr *> &target_exprs)
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_join_source_used_inputs()

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_join_source_used_inputs ( const RelAlgNode ra_node,
const Catalog_Namespace::Catalog cat 
)

Definition at line 950 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, CHECK_GT, get_data_sink(), anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelAlgNode::inputCount(), join(), run_benchmark_import::result, RelAlgNode::toString(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc_impl().

// Collects the RexInputs referenced by join conditions feeding ra_node: the
// binary-join condition, or the inner plus all per-level outer conditions of a
// left-deep join. Non-join sinks contribute nothing (after sanity checks).
951  {
952  const auto data_sink_node = get_data_sink(ra_node);
953  if (auto join = dynamic_cast<const RelJoin*>(data_sink_node)) {
954  CHECK_EQ(join->inputCount(), 2u);
955  const auto condition = join->getCondition();
956  RexUsedInputsVisitor visitor(cat);
957  auto condition_inputs = visitor.visit(condition);
958  std::vector<std::shared_ptr<RexInput>> condition_inputs_owned(
959  visitor.get_inputs_owned());
960  return std::make_pair(condition_inputs, condition_inputs_owned);
961  }
962 
963  if (auto left_deep_join = dynamic_cast<const RelLeftDeepInnerJoin*>(data_sink_node)) {
964  CHECK_GE(left_deep_join->inputCount(), 2u);
965  const auto condition = left_deep_join->getInnerCondition();
966  RexUsedInputsVisitor visitor(cat);
967  auto result = visitor.visit(condition);
// Outer conditions exist per nesting level, starting at level 1.
968  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
969  ++nesting_level) {
970  const auto outer_condition = left_deep_join->getOuterCondition(nesting_level);
971  if (outer_condition) {
972  const auto outer_result = visitor.visit(outer_condition);
973  result.insert(outer_result.begin(), outer_result.end());
974  }
975  }
976  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
977  return std::make_pair(result, used_inputs_owned);
978  }
979 
// Not a join: just validate the expected input arity for the node kind.
980  if (dynamic_cast<const RelLogicalUnion*>(ra_node)) {
981  CHECK_GT(ra_node->inputCount(), 1u) << ra_node->toString();
982  } else if (dynamic_cast<const RelTableFunction*>(ra_node)) {
983  CHECK_GT(ra_node->inputCount(), 0u) << ra_node->toString();
984  } else {
985  CHECK_EQ(ra_node->inputCount(), 1u) << ra_node->toString();
986  }
987  return std::make_pair(std::unordered_set<const RexInput*>{},
988  std::vector<std::shared_ptr<RexInput>>{});
989 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
#define CHECK_GE(x, y)
Definition: Logger.h:210
#define CHECK_GT(x, y)
Definition: Logger.h:209
const size_t inputCount() const
virtual std::string toString() const =0
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_join_type()

JoinType anonymous_namespace{RelAlgExecutor.cpp}::get_join_type ( const RelAlgNode ra)

Definition at line 3131 of file RelAlgExecutor.cpp.

References get_data_sink(), INNER, INVALID, and join().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

// Derives the join type from the node's data sink: a plain join reports its
// own type, a left-deep join is always INNER, anything else is INVALID.
3131  {
3132  auto sink = get_data_sink(ra);
3133  if (auto join = dynamic_cast<const RelJoin*>(sink)) {
3134  return join->getJoinType();
3135  }
3136  if (dynamic_cast<const RelLeftDeepInnerJoin*>(sink)) {
3137  return JoinType::INNER;
3138  }
3139 
3140  return JoinType::INVALID;  // no join feeds this node
3141 }
std::string join(T const &container, std::string const &delim)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_left_deep_join_input_sizes()

std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::get_left_deep_join_input_sizes ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 3256 of file RelAlgExecutor.cpp.

References get_node_output(), RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

// Returns, per input of the left-deep join, the number of output columns that
// input produces.
3257  {
3258  std::vector<size_t> input_sizes;
3259  for (size_t i = 0; i < left_deep_join->inputCount(); ++i) {
3260  const auto inputs = get_node_output(left_deep_join->getInput(i));
3261  input_sizes.push_back(inputs.size());
3262  }
3263  return input_sizes;
3264 }
const size_t inputCount() const
RANodeOutput get_node_output(const RelAlgNode *ra_node)
const RelAlgNode * getInput(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_logical_type_for_expr()

SQLTypeInfo anonymous_namespace{RelAlgExecutor.cpp}::get_logical_type_for_expr ( const Analyzer::Expr expr)
inline

Definition at line 1337 of file RelAlgExecutor.cpp.

References get_logical_type_info(), get_nullable_logical_type_info(), Analyzer::Expr::get_type_info(), is_agg(), is_count_distinct(), and kBIGINT.

Referenced by get_targets_meta().

// Logical (storage-independent) type for a target expression: COUNT DISTINCT
// is fixed up to BIGINT, other aggregates get a nullable logical type, and
// everything else maps straight to its logical type.
1337  {
1338  if (is_count_distinct(&expr)) {
1339  return SQLTypeInfo(kBIGINT, false);
1340  } else if (is_agg(&expr)) {
1341  return get_nullable_logical_type_info(expr.get_type_info());
1342  }
1343  return get_logical_type_info(expr.get_type_info());
1344 }
bool is_agg(const Analyzer::Expr *expr)
SQLTypeInfo get_nullable_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:841
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:820
bool is_count_distinct(const Analyzer::Expr *expr)
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:78
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_order_entries()

std::list<Analyzer::OrderEntry> anonymous_namespace{RelAlgExecutor.cpp}::get_order_entries ( const RelSort sort)

Definition at line 2337 of file RelAlgExecutor.cpp.

References RelSort::collationCount(), Descending, First, RelSort::getCollation(), and run_benchmark_import::result.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

// Converts the sort node's collation into Analyzer::OrderEntry records.
2337  {
2338  std::list<Analyzer::OrderEntry> result;
2339  for (size_t i = 0; i < sort->collationCount(); ++i) {
2340  const auto sort_field = sort->getCollation(i);
// +1: OrderEntry appears to use 1-based column indexes — confirm at definition.
2341  result.emplace_back(sort_field.getField() + 1,
2342  sort_field.getSortDir() == SortDirection::Descending,
2343  sort_field.getNullsPosition() == NullSortedPosition::First);
2344  }
2345  return result;
2346 }
size_t collationCount() const
SortField getCollation(const size_t i) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_physical_inputs()

std::unordered_set<PhysicalInput> anonymous_namespace{RelAlgExecutor.cpp}::get_physical_inputs ( const Catalog_Namespace::Catalog cat,
const RelAlgNode ra 
)

Definition at line 59 of file RelAlgExecutor.cpp.

References Catalog_Namespace::Catalog::getColumnIdBySpi().

Referenced by RelAlgExecutor::computeColRangesCache(), RelAlgExecutor::computeStringDictionaryGenerations(), RelAlgExecutor::executeRelAlgQueryNoRetry(), and RelAlgExecutor::getOuterFragmentCount().

// Re-maps each physical input's column id through the catalog's SPI
// (system-provided id) translation.
61  {
62  auto phys_inputs = get_physical_inputs(ra);
63  std::unordered_set<PhysicalInput> phys_inputs2;
64  for (auto& phi : phys_inputs) {
65  phys_inputs2.insert(
66  PhysicalInput{cat.getColumnIdBySpi(phi.table_id, phi.col_id), phi.table_id});
67  }
68  return phys_inputs2;
69 }
std::unordered_set< PhysicalInput > get_physical_inputs(const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1542
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_scalar_sources_size() [1/3]

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelCompound compound)

Definition at line 1096 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSourcesSize().

// Scalar-source count for a compound node.
1096  {
1097  return compound->getScalarSourcesSize();
1098 }
const size_t getScalarSourcesSize() const
+ Here is the call graph for this function:

◆ get_scalar_sources_size() [2/3]

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelProject project)

Definition at line 1100 of file RelAlgExecutor.cpp.

References RelProject::size().

// Scalar-source count for a projection: one per projected expression.
1100  {
1101  return project->size();
1102 }
size_t size() const override
+ Here is the call graph for this function:

◆ get_scalar_sources_size() [3/3]

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelTableFunction table_func)

Definition at line 1104 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputsSize().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

// Scalar-source count for a table function: one per function input.
1104  {
1105  return table_func->getTableFuncInputsSize();
1106 }
size_t getTableFuncInputsSize() const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_scan_limit()

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scan_limit ( const RelAlgNode ra,
const size_t  limit 
)

Definition at line 2348 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit().

// Scan limit for a sort's input: 0 (no limit pushdown) when the input
// aggregates, since the limit applies to groups, not scanned rows.
2348  {
2349  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
2350  if (aggregate) {
2351  return 0;
2352  }
2353  const auto compound = dynamic_cast<const RelCompound*>(ra);
2354  return (compound && compound->isAggregate()) ? 0 : limit;
2355 }
+ Here is the caller graph for this function:

◆ get_targets_meta() [1/2]

template<class RA >
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RA *  ra_node,
const std::vector< Analyzer::Expr *> &  target_exprs 
)

Definition at line 1347 of file RelAlgExecutor.cpp.

References CHECK, CHECK_EQ, and get_logical_type_for_expr().

// Pairs each of the node's output field names with the logical and physical
// types of the corresponding target expression.
1349  {
1350  std::vector<TargetMetaInfo> targets_meta;
1351  CHECK_EQ(ra_node->size(), target_exprs.size());
1352  for (size_t i = 0; i < ra_node->size(); ++i) {
1353  CHECK(target_exprs[i]);
1354  // TODO(alex): remove the count distinct type fixup.
1355  targets_meta.emplace_back(ra_node->getFieldName(i),
1356  get_logical_type_for_expr(*target_exprs[i]),
1357  target_exprs[i]->get_type_info());
1358  }
1359  return targets_meta;
1360 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
SQLTypeInfo get_logical_type_for_expr(const Analyzer::Expr &expr)
#define CHECK(condition)
Definition: Logger.h:197
+ Here is the call graph for this function:

◆ get_targets_meta() [2/2]

template<>
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RelFilter filter,
const std::vector< Analyzer::Expr *> &  target_exprs 
)

Definition at line 1363 of file RelAlgExecutor.cpp.

References RelAlgNode::getInput(), RelAlgNode::toString(), and UNREACHABLE.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and get_inputs_meta().

// RelFilter specialization: a filter has no field names of its own, so the
// metadata is taken from its input node, dispatched by concrete input type.
1365  {
1366  RelAlgNode const* input0 = filter->getInput(0);
1367  if (auto const* input = dynamic_cast<RelCompound const*>(input0)) {
1368  return get_targets_meta(input, target_exprs);
1369  } else if (auto const* input = dynamic_cast<RelProject const*>(input0)) {
1370  return get_targets_meta(input, target_exprs);
1371  } else if (auto const* input = dynamic_cast<RelLogicalUnion const*>(input0)) {
1372  return get_targets_meta(input, target_exprs);
1373  } else if (auto const* input = dynamic_cast<RelAggregate const*>(input0)) {
1374  return get_targets_meta(input, target_exprs);
1375  } else if (auto const* input = dynamic_cast<RelScan const*>(input0)) {
1376  return get_targets_meta(input, target_exprs);
1377  }
1378  UNREACHABLE() << "Unhandled node type: " << input0->toString();
1379  return {};
1380 }
#define UNREACHABLE()
Definition: Logger.h:241
std::vector< TargetMetaInfo > get_targets_meta(const RelFilter *filter, const std::vector< Analyzer::Expr *> &target_exprs)
virtual std::string toString() const =0
const RelAlgNode * getInput(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ get_used_inputs() [1/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelCompound compound,
const Catalog_Namespace::Catalog cat 
)

Definition at line 809 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelCompound::getFilterExpr(), RelCompound::getScalarSource(), RelCompound::getScalarSourcesSize(), and RexVisitorBase< T >::visit().

// Inputs used by a compound node: those referenced by its filter expression
// (if any) plus those referenced by each scalar source.
809  {
810  RexUsedInputsVisitor visitor(cat);
811  const auto filter_expr = compound->getFilterExpr();
812  std::unordered_set<const RexInput*> used_inputs =
813  filter_expr ? visitor.visit(filter_expr) : std::unordered_set<const RexInput*>{};
814  const auto sources_size = compound->getScalarSourcesSize();
815  for (size_t i = 0; i < sources_size; ++i) {
816  const auto source_inputs = visitor.visit(compound->getScalarSource(i));
817  used_inputs.insert(source_inputs.begin(), source_inputs.end());
818  }
819  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
820  return std::make_pair(used_inputs, used_inputs_owned);
821 }
const size_t getScalarSourcesSize() const
const RexScalar * getScalarSource(const size_t i) const
const RexScalar * getFilterExpr() const
+ Here is the call graph for this function:

◆ get_used_inputs() [2/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelAggregate aggregate,
const Catalog_Namespace::Catalog cat 
)

Definition at line 824 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, RelAggregate::getAggExprs(), RelAggregate::getGroupByCount(), RelAlgNode::getInput(), RelAlgNode::getOutputMetainfo(), and RelAlgNode::inputCount().

// Inputs used by an aggregate node: its group-by columns plus every operand of
// every aggregate expression, synthesized as RexInputs over its single source.
824  {
825  CHECK_EQ(size_t(1), aggregate->inputCount());
826  std::unordered_set<const RexInput*> used_inputs;
827  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
828  const auto source = aggregate->getInput(0);
829  const auto& in_metainfo = source->getOutputMetainfo();
830  const auto group_count = aggregate->getGroupByCount();
831  CHECK_GE(in_metainfo.size(), group_count);
// Group-by keys occupy the leading source columns.
832  for (size_t i = 0; i < group_count; ++i) {
833  auto synthesized_used_input = new RexInput(source, i);
834  used_inputs_owned.emplace_back(synthesized_used_input);
835  used_inputs.insert(synthesized_used_input);
836  }
837  for (const auto& agg_expr : aggregate->getAggExprs()) {
838  for (size_t i = 0; i < agg_expr->size(); ++i) {
839  const auto operand_idx = agg_expr->getOperand(i);
840  CHECK_GE(in_metainfo.size(), static_cast<size_t>(operand_idx));
841  auto synthesized_used_input = new RexInput(source, operand_idx);
842  used_inputs_owned.emplace_back(synthesized_used_input);
843  used_inputs.insert(synthesized_used_input);
844  }
845  }
846  return std::make_pair(used_inputs, used_inputs_owned);
847 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
#define CHECK_GE(x, y)
Definition: Logger.h:210
const std::vector< TargetMetaInfo > & getOutputMetainfo() const
const size_t getGroupByCount() const
const size_t inputCount() const
const RelAlgNode * getInput(const size_t idx) const
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
+ Here is the call graph for this function:

◆ get_used_inputs() [3/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelProject project,
const Catalog_Namespace::Catalog cat 
)

Definition at line 850 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelProject::getProjectAt(), RelProject::size(), and RexVisitorBase< T >::visit().

// Inputs used by a projection: the union of inputs referenced by each
// projected expression.
850  {
851  RexUsedInputsVisitor visitor(cat);
852  std::unordered_set<const RexInput*> used_inputs;
853  for (size_t i = 0; i < project->size(); ++i) {
854  const auto proj_inputs = visitor.visit(project->getProjectAt(i));
855  used_inputs.insert(proj_inputs.begin(), proj_inputs.end());
856  }
857  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
858  return std::make_pair(used_inputs, used_inputs_owned);
859 }
size_t size() const override
const RexScalar * getProjectAt(const size_t idx) const
+ Here is the call graph for this function:

◆ get_used_inputs() [4/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelTableFunction table_func,
const Catalog_Namespace::Catalog cat 
)

Definition at line 862 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelTableFunction::getTableFuncInputAt(), RelTableFunction::getTableFuncInputsSize(), and RexVisitorBase< T >::visit().

// Inputs used by a table function: the union of inputs referenced by each
// function input expression.
863  {
864  RexUsedInputsVisitor visitor(cat);
865  std::unordered_set<const RexInput*> used_inputs;
866  for (size_t i = 0; i < table_func->getTableFuncInputsSize(); ++i) {
867  const auto table_func_inputs = visitor.visit(table_func->getTableFuncInputAt(i));
868  used_inputs.insert(table_func_inputs.begin(), table_func_inputs.end());
869  }
870  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
871  return std::make_pair(used_inputs, used_inputs_owned);
872 }
const RexScalar * getTableFuncInputAt(const size_t idx) const
size_t getTableFuncInputsSize() const
+ Here is the call graph for this function:

◆ get_used_inputs() [5/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelFilter filter,
const Catalog_Namespace::Catalog cat 
)

Definition at line 875 of file RelAlgExecutor.cpp.

References CHECK, and get_data_sink().

// Inputs used by a filter: every column of every source of its data sink,
// synthesized as RexInputs (column counts come from the scan's size for scan
// sources, or from the source's output metadata otherwise).
875  {
876  std::unordered_set<const RexInput*> used_inputs;
877  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
878  const auto data_sink_node = get_data_sink(filter);
879  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
880  const auto source = data_sink_node->getInput(nest_level);
881  const auto scan_source = dynamic_cast<const RelScan*>(source);
882  if (scan_source) {
883  CHECK(source->getOutputMetainfo().empty());  // scans have no computed metadata yet
884  for (size_t i = 0; i < scan_source->size(); ++i) {
885  auto synthesized_used_input = new RexInput(scan_source, i);
886  used_inputs_owned.emplace_back(synthesized_used_input);
887  used_inputs.insert(synthesized_used_input);
888  }
889  } else {
890  const auto& partial_in_metadata = source->getOutputMetainfo();
891  for (size_t i = 0; i < partial_in_metadata.size(); ++i) {
892  auto synthesized_used_input = new RexInput(source, i);
893  used_inputs_owned.emplace_back(synthesized_used_input);
894  used_inputs.insert(synthesized_used_input);
895  }
896  }
897  }
898  return std::make_pair(used_inputs, used_inputs_owned);
899 }
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
+ Here is the call graph for this function:

◆ get_used_inputs() [6/6]

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelLogicalUnion logical_union,
const Catalog_Namespace::Catalog  
)

Definition at line 902 of file RelAlgExecutor.cpp.

References RelAlgNode::getInput(), RelAlgNode::inputCount(), and VLOG.

Referenced by get_input_desc().

// Inputs used by a logical union: every column of every input, synthesized as
// RexInputs.
902  {
903  std::unordered_set<const RexInput*> used_inputs(logical_union->inputCount());
904  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
905  used_inputs_owned.reserve(logical_union->inputCount());
906  VLOG(3) << "logical_union->inputCount()=" << logical_union->inputCount();
907  auto const n_inputs = logical_union->inputCount();
908  for (size_t nest_level = 0; nest_level < n_inputs; ++nest_level) {
909  auto input = logical_union->getInput(nest_level);
910  for (size_t i = 0; i < input->size(); ++i) {
911  used_inputs_owned.emplace_back(std::make_shared<RexInput>(input, i));
912  used_inputs.insert(used_inputs_owned.back().get());
913  }
914  }
915  return std::make_pair(std::move(used_inputs), std::move(used_inputs_owned));
916 }
const size_t inputCount() const
const RelAlgNode * getInput(const size_t idx) const
#define VLOG(n)
Definition: Logger.h:291
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ groups_approx_upper_bound()

size_t anonymous_namespace{RelAlgExecutor.cpp}::groups_approx_upper_bound ( const std::vector< InputTableInfo > &  table_infos)

Upper bound estimation for the number of groups. Not strictly correct and not tight, but if the tables involved are really small we shouldn't waste time doing the NDV estimation. We don't account for cross-joins and / or group by unnested array, which is the reason this estimation isn't entirely reliable.

Definition at line 2554 of file RelAlgExecutor.cpp.

References CHECK.

Referenced by RelAlgExecutor::executeWorkUnit().

// Rough group-count upper bound: the largest tuple-count upper bound across
// all input tables, never less than 1 (see the function-level note about this
// estimate not accounting for cross-joins or unnested arrays).
2554  {
2555  CHECK(!table_infos.empty());
2556  const auto& first_table = table_infos.front();
2557  size_t max_num_groups = first_table.info.getNumTuplesUpperBound();
2558  for (const auto& table_info : table_infos) {
2559  if (table_info.info.getNumTuplesUpperBound() > max_num_groups) {
2560  max_num_groups = table_info.info.getNumTuplesUpperBound();
2561  }
2562  }
2563  return std::max(max_num_groups, size_t(1));
2564 }
#define CHECK(condition)
Definition: Logger.h:197
+ Here is the caller graph for this function:

◆ insert_one_dict_str() [1/2]

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const std::string &  columnName,
const SQLTypeInfo columnType,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 1983 of file RelAlgExecutor.cpp.

References CHECK, logger::ERROR, SQLTypeInfo::get_comp_param(), Analyzer::Constant::get_constval(), Analyzer::Constant::get_is_null(), Catalog_Namespace::Catalog::getMetadataForDict(), inline_fixed_encoding_null_val(), LOG, Datum::stringval, and the template parameter T.

1987  {
1988  if (col_cv->get_is_null()) {
1989  *col_data = inline_fixed_encoding_null_val(columnType);
1990  } else {
1991  const int dict_id = columnType.get_comp_param();
1992  const auto col_datum = col_cv->get_constval();
1993  const auto& str = *col_datum.stringval;
1994  const auto dd = catalog.getMetadataForDict(dict_id);
1995  CHECK(dd && dd->stringDict);
1996  int32_t str_id = dd->stringDict->getOrAdd(str);
1997  if (!dd->dictIsTemp) {
1998  const auto checkpoint_ok = dd->stringDict->checkpoint();
1999  if (!checkpoint_ok) {
2000  throw std::runtime_error("Failed to checkpoint dictionary for column " +
2001  columnName);
2002  }
2003  }
2004  const bool invalid = str_id > max_valid_int_value<T>();
2005  if (invalid || str_id == inline_int_null_value<int32_t>()) {
2006  if (invalid) {
2007  LOG(ERROR) << "Could not encode string: " << str
2008  << ", the encoded value doesn't fit in " << sizeof(T) * 8
2009  << " bits. Will store NULL instead.";
2010  }
2011  str_id = inline_fixed_encoding_null_val(columnType);
2012  }
2013  *col_data = str_id;
2014  }
2015  return *col_data;
2016 }
#define LOG(tag)
Definition: Logger.h:188
HOST DEVICE int get_comp_param() const
Definition: sqltypes.h:268
Datum get_constval() const
Definition: Analyzer.h:335
const DictDescriptor * getMetadataForDict(int dict_ref, bool loadDict=true) const
Definition: Catalog.cpp:1451
std::string * stringval
Definition: sqltypes.h:143
bool get_is_null() const
Definition: Analyzer.h:334
#define CHECK(condition)
Definition: Logger.h:197
int64_t inline_fixed_encoding_null_val(const SQL_TYPE_INFO &ti)
+ Here is the call graph for this function:

◆ insert_one_dict_str() [2/2]

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const ColumnDescriptor cd,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 2019 of file RelAlgExecutor.cpp.

References ColumnDescriptor::columnName, and ColumnDescriptor::columnType.

Referenced by RelAlgExecutor::executeSimpleInsert().

2022  {
2023  return insert_one_dict_str(col_data, cd->columnName, cd->columnType, col_cv, catalog);
2024 }
int64_t insert_one_dict_str(T *col_data, const ColumnDescriptor *cd, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
SQLTypeInfo columnType
std::string columnName
+ Here is the caller graph for this function:

◆ is_agg()

bool anonymous_namespace{RelAlgExecutor.cpp}::is_agg ( const Analyzer::Expr expr)

Definition at line 1325 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_aggtype(), kAVG, kMAX, kMIN, and kSUM.

Referenced by anonymous_namespace{RelAlgDagBuilder.cpp}::create_compound(), RelAlgExecutor::executeWorkUnit(), get_logical_type_for_expr(), and ResultSet::getSingleSlotTargetBitmap().

1325  {
1326  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1327  if (agg_expr && agg_expr->get_contains_agg()) {
1328  auto agg_type = agg_expr->get_aggtype();
1329  if (agg_type == SQLAgg::kMIN || agg_type == SQLAgg::kMAX ||
1330  agg_type == SQLAgg::kSUM || agg_type == SQLAgg::kAVG) {
1331  return true;
1332  }
1333  }
1334  return false;
1335 }
Definition: sqldefs.h:73
Definition: sqldefs.h:75
SQLAgg get_aggtype() const
Definition: Analyzer.h:1095
Definition: sqldefs.h:74
Definition: sqldefs.h:72
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ is_count_distinct()

bool anonymous_namespace{RelAlgExecutor.cpp}::is_count_distinct ( const Analyzer::Expr expr)

Definition at line 1320 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_is_distinct().

Referenced by get_logical_type_for_expr().

1320  {
1321  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1322  return agg_expr && agg_expr->get_is_distinct();
1323 }
bool get_is_distinct() const
Definition: Analyzer.h:1098
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ is_window_execution_unit()

bool anonymous_namespace{RelAlgExecutor.cpp}::is_window_execution_unit ( const RelAlgExecutionUnit ra_exe_unit)

Definition at line 1610 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1610  {
1611  return std::any_of(ra_exe_unit.target_exprs.begin(),
1612  ra_exe_unit.target_exprs.end(),
1613  [](const Analyzer::Expr* expr) {
1614  return dynamic_cast<const Analyzer::WindowFunction*>(expr);
1615  });
1616 }
std::vector< Analyzer::Expr * > target_exprs
+ Here is the caller graph for this function:

◆ left_deep_join_types()

std::vector<JoinType> anonymous_namespace{RelAlgExecutor.cpp}::left_deep_join_types ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 3211 of file RelAlgExecutor.cpp.

References CHECK_GE, RelLeftDeepInnerJoin::getOuterCondition(), INNER, RelAlgNode::inputCount(), and LEFT.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::translateLeftDeepJoinFilter().

3211  {
3212  CHECK_GE(left_deep_join->inputCount(), size_t(2));
3213  std::vector<JoinType> join_types(left_deep_join->inputCount() - 1, JoinType::INNER);
3214  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
3215  ++nesting_level) {
3216  if (left_deep_join->getOuterCondition(nesting_level)) {
3217  join_types[nesting_level - 1] = JoinType::LEFT;
3218  }
3219  }
3220  return join_types;
3221 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
const RexScalar * getOuterCondition(const size_t nesting_level) const
const size_t inputCount() const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ list_contains_expression()

template<class QualsList >
bool anonymous_namespace{RelAlgExecutor.cpp}::list_contains_expression ( const QualsList &  haystack,
const std::shared_ptr< Analyzer::Expr > &  needle 
)

Definition at line 3390 of file RelAlgExecutor.cpp.

Referenced by reverse_logical_distribution().

3391  {
3392  for (const auto& qual : haystack) {
3393  if (*qual == *needle) {
3394  return true;
3395  }
3396  }
3397  return false;
3398 }
+ Here is the caller graph for this function:

◆ node_is_aggregate()

bool anonymous_namespace{RelAlgExecutor.cpp}::node_is_aggregate ( const RelAlgNode ra)

Definition at line 53 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

53  {
54  const auto compound = dynamic_cast<const RelCompound*>(ra);
55  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
56  return ((compound && compound->isAggregate()) || aggregate);
57 }
+ Here is the caller graph for this function:

◆ reverse_logical_distribution()

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::reverse_logical_distribution ( const std::shared_ptr< Analyzer::Expr > &  expr)

Definition at line 3403 of file RelAlgExecutor.cpp.

References build_logical_expression(), CHECK_GE, kAND, kONE, kOR, list_contains_expression(), Parser::OperExpr::normalize(), qual_to_conjunctive_form(), and qual_to_disjunctive_form().

Referenced by RelAlgExecutor::makeJoinQuals().

3404  {
3405  const auto expr_terms = qual_to_disjunctive_form(expr);
3406  CHECK_GE(expr_terms.size(), size_t(1));
3407  const auto& first_term = expr_terms.front();
3408  const auto first_term_factors = qual_to_conjunctive_form(first_term);
3409  std::vector<std::shared_ptr<Analyzer::Expr>> common_factors;
3410  // First, collect the conjunctive components common to all the disjunctive components.
3411  // Don't do it for simple qualifiers, we only care about expensive or join qualifiers.
3412  for (const auto& first_term_factor : first_term_factors.quals) {
3413  bool is_common =
3414  expr_terms.size() > 1; // Only report common factors for disjunction.
3415  for (size_t i = 1; i < expr_terms.size(); ++i) {
3416  const auto crt_term_factors = qual_to_conjunctive_form(expr_terms[i]);
3417  if (!list_contains_expression(crt_term_factors.quals, first_term_factor)) {
3418  is_common = false;
3419  break;
3420  }
3421  }
3422  if (is_common) {
3423  common_factors.push_back(first_term_factor);
3424  }
3425  }
3426  if (common_factors.empty()) {
3427  return expr;
3428  }
3429  // Now that the common expressions are known, collect the remaining expressions.
3430  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_terms;
3431  for (const auto& term : expr_terms) {
3432  const auto term_cf = qual_to_conjunctive_form(term);
3433  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_quals(
3434  term_cf.simple_quals.begin(), term_cf.simple_quals.end());
3435  for (const auto& qual : term_cf.quals) {
3436  if (!list_contains_expression(common_factors, qual)) {
3437  remaining_quals.push_back(qual);
3438  }
3439  }
3440  if (!remaining_quals.empty()) {
3441  remaining_terms.push_back(build_logical_expression(remaining_quals, kAND));
3442  }
3443  }
3444  // Reconstruct the expression with the transformation applied.
3445  const auto common_expr = build_logical_expression(common_factors, kAND);
3446  if (remaining_terms.empty()) {
3447  return common_expr;
3448  }
3449  const auto remaining_expr = build_logical_expression(remaining_terms, kOR);
3450  return Parser::OperExpr::normalize(kAND, kONE, common_expr, remaining_expr);
3451 }
Definition: sqldefs.h:38
#define CHECK_GE(x, y)
Definition: Logger.h:210
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
bool list_contains_expression(const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
Definition: sqldefs.h:37
std::shared_ptr< Analyzer::Expr > build_logical_expression(const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:270
Definition: sqldefs.h:69
std::vector< std::shared_ptr< Analyzer::Expr > > qual_to_disjunctive_form(const std::shared_ptr< Analyzer::Expr > &qual_expr)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ rewrite_quals()

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::rewrite_quals ( const std::list< std::shared_ptr< Analyzer::Expr >> &  quals)

Definition at line 3266 of file RelAlgExecutor.cpp.

References rewrite_expr().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

3267  {
3268  std::list<std::shared_ptr<Analyzer::Expr>> rewritten_quals;
3269  for (const auto& qual : quals) {
3270  const auto rewritten_qual = rewrite_expr(qual.get());
3271  rewritten_quals.push_back(rewritten_qual ? rewritten_qual : qual);
3272  }
3273  return rewritten_quals;
3274 }
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ rex_to_conjunctive_form()

std::vector<const RexScalar*> anonymous_namespace{RelAlgExecutor.cpp}::rex_to_conjunctive_form ( const RexScalar qual_expr)

Definition at line 3363 of file RelAlgExecutor.cpp.

References CHECK, CHECK_GE, and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3363  {
3364  CHECK(qual_expr);
3365  const auto bin_oper = dynamic_cast<const RexOperator*>(qual_expr);
3366  if (!bin_oper || bin_oper->getOperator() != kAND) {
3367  return {qual_expr};
3368  }
3369  CHECK_GE(bin_oper->size(), size_t(2));
3370  auto lhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(0));
3371  for (size_t i = 1; i < bin_oper->size(); ++i) {
3372  const auto rhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(i));
3373  lhs_cf.insert(lhs_cf.end(), rhs_cf.begin(), rhs_cf.end());
3374  }
3375  return lhs_cf;
3376 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::vector< const RexScalar * > rex_to_conjunctive_form(const RexScalar *qual_expr)
Definition: sqldefs.h:37
#define CHECK(condition)
Definition: Logger.h:197
+ Here is the caller graph for this function:

◆ scalar_at() [1/3]

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelCompound compound 
)

Definition at line 1108 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSource().

1108  {
1109  return compound->getScalarSource(i);
1110 }
const RexScalar * getScalarSource(const size_t i) const
+ Here is the call graph for this function:

◆ scalar_at() [2/3]

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelProject project 
)

Definition at line 1112 of file RelAlgExecutor.cpp.

References RelProject::getProjectAt().

1112  {
1113  return project->getProjectAt(i);
1114 }
const RexScalar * getProjectAt(const size_t idx) const
+ Here is the call graph for this function:

◆ scalar_at() [3/3]

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelTableFunction table_func 
)

Definition at line 1116 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputAt().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1116  {
1117  return table_func->getTableFuncInputAt(i);
1118 }
const RexScalar * getTableFuncInputAt(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ set_transient_dict()

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict ( const std::shared_ptr< Analyzer::Expr expr)

Definition at line 1120 of file RelAlgExecutor.cpp.

References kENCODING_DICT, kENCODING_NONE, and TRANSIENT_DICT_ID.

Referenced by set_transient_dict_maybe(), translate_groupby_exprs(), and translate_targets().

1121  {
1122  const auto& ti = expr->get_type_info();
1123  if (!ti.is_string() || ti.get_compression() != kENCODING_NONE) {
1124  return expr;
1125  }
1126  auto transient_dict_ti = ti;
1127  transient_dict_ti.set_compression(kENCODING_DICT);
1128  transient_dict_ti.set_comp_param(TRANSIENT_DICT_ID);
1129  transient_dict_ti.set_fixed_size();
1130  return expr->add_cast(transient_dict_ti);
1131 }
#define TRANSIENT_DICT_ID
Definition: sqltypes.h:198
+ Here is the caller graph for this function:

◆ set_transient_dict_maybe()

void anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict_maybe ( std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::shared_ptr< Analyzer::Expr > &  expr 
)

Definition at line 1133 of file RelAlgExecutor.cpp.

References fold_expr(), and set_transient_dict().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1135  {
1136  try {
1137  scalar_sources.push_back(set_transient_dict(fold_expr(expr.get())));
1138  } catch (...) {
1139  scalar_sources.push_back(fold_expr(expr.get()));
1140  }
1141 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ synthesize_inputs()

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::synthesize_inputs ( const RelAlgNode ra_node,
const size_t  nest_level,
const std::vector< TargetMetaInfo > &  in_metainfo,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3524 of file RelAlgExecutor.cpp.

References CHECK, CHECK_GE, CHECK_LE, RelAlgNode::getInput(), RelAlgNode::inputCount(), and table_id_from_ra().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and get_inputs_meta().

3528  {
3529  CHECK_LE(size_t(1), ra_node->inputCount());
3530  CHECK_GE(size_t(2), ra_node->inputCount());
3531  const auto input = ra_node->getInput(nest_level);
3532  const auto it_rte_idx = input_to_nest_level.find(input);
3533  CHECK(it_rte_idx != input_to_nest_level.end());
3534  const int rte_idx = it_rte_idx->second;
3535  const int table_id = table_id_from_ra(input);
3536  std::vector<std::shared_ptr<Analyzer::Expr>> inputs;
3537  const auto scan_ra = dynamic_cast<const RelScan*>(input);
3538  int input_idx = 0;
3539  for (const auto& input_meta : in_metainfo) {
3540  inputs.push_back(
3541  std::make_shared<Analyzer::ColumnVar>(input_meta.get_type_info(),
3542  table_id,
3543  scan_ra ? input_idx + 1 : input_idx,
3544  rte_idx));
3545  ++input_idx;
3546  }
3547  return inputs;
3548 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
#define CHECK_LE(x, y)
Definition: Logger.h:208
const size_t inputCount() const
int table_id_from_ra(const RelAlgNode *ra_node)
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * getInput(const size_t idx) const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ table_id_from_ra()

int anonymous_namespace{RelAlgExecutor.cpp}::table_id_from_ra ( const RelAlgNode ra_node)

Definition at line 918 of file RelAlgExecutor.cpp.

References CHECK, RelAlgNode::getId(), and RelScan::getTableDescriptor().

Referenced by collect_used_input_desc(), get_input_desc_impl(), and synthesize_inputs().

918  {
919  const auto scan_ra = dynamic_cast<const RelScan*>(ra_node);
920  if (scan_ra) {
921  const auto td = scan_ra->getTableDescriptor();
922  CHECK(td);
923  return td->tableId;
924  }
925  return -ra_node->getId();
926 }
const TableDescriptor * getTableDescriptor() const
unsigned getId() const
#define CHECK(condition)
Definition: Logger.h:197
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ target_exprs_for_union()

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::target_exprs_for_union ( RelAlgNode const *  input_node)

Definition at line 3678 of file RelAlgExecutor.cpp.

References RelAlgNode::getId(), RelAlgNode::getOutputMetainfo(), shared::printContainer(), and VLOG.

Referenced by RelAlgExecutor::createUnionWorkUnit().

3679  {
3680  std::vector<TargetMetaInfo> const& tmis = input_node->getOutputMetainfo();
3681  VLOG(3) << "input_node->getOutputMetainfo()=" << shared::printContainer(tmis);
3682  const int negative_node_id = -input_node->getId();
3683  std::vector<std::shared_ptr<Analyzer::Expr>> target_exprs;
3684  target_exprs.reserve(tmis.size());
3685  for (size_t i = 0; i < tmis.size(); ++i) {
3686  target_exprs.push_back(std::make_shared<Analyzer::ColumnVar>(
3687  tmis[i].get_type_info(), negative_node_id, i, 0));
3688  }
3689  return target_exprs;
3690 }
PrintContainer< CONTAINER > printContainer(CONTAINER &container)
Definition: misc.h:64
#define VLOG(n)
Definition: Logger.h:291
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ transform_to_inner()

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::transform_to_inner ( const Analyzer::Expr expr)

Definition at line 1703 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::computeWindow().

1703  {
1704  const auto tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr);
1705  if (tuple) {
1706  std::vector<std::shared_ptr<Analyzer::Expr>> transformed_tuple;
1707  for (const auto& element : tuple->getTuple()) {
1708  transformed_tuple.push_back(transform_to_inner(element.get()));
1709  }
1710  return makeExpr<Analyzer::ExpressionTuple>(transformed_tuple);
1711  }
1712  const auto col = dynamic_cast<const Analyzer::ColumnVar*>(expr);
1713  if (!col) {
1714  throw std::runtime_error("Only columns supported in the window partition for now");
1715  }
1716  return makeExpr<Analyzer::ColumnVar>(
1717  col->get_type_info(), col->get_table_id(), col->get_column_id(), 1);
1718 }
std::shared_ptr< Analyzer::Expr > transform_to_inner(const Analyzer::Expr *expr)
+ Here is the caller graph for this function:

◆ translate_groupby_exprs() [1/2]

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelCompound compound,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1216 of file RelAlgExecutor.cpp.

References RelCompound::getGroupByCount(), RelCompound::isAggregate(), and set_transient_dict().

1218  {
1219  if (!compound->isAggregate()) {
1220  return {nullptr};
1221  }
1222  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1223  for (size_t group_idx = 0; group_idx < compound->getGroupByCount(); ++group_idx) {
1224  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1225  }
1226  return groupby_exprs;
1227 }
const size_t getGroupByCount() const
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
bool isAggregate() const
+ Here is the call graph for this function:

◆ translate_groupby_exprs() [2/2]

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelAggregate aggregate,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1229 of file RelAlgExecutor.cpp.

References RelAggregate::getGroupByCount(), and set_transient_dict().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1231  {
1232  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1233  for (size_t group_idx = 0; group_idx < aggregate->getGroupByCount(); ++group_idx) {
1234  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1235  }
1236  return groupby_exprs;
1237 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
const size_t getGroupByCount() const
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ translate_quals()

QualsConjunctiveForm anonymous_namespace{RelAlgExecutor.cpp}::translate_quals ( const RelCompound compound,
const RelAlgTranslator translator 
)

Definition at line 1239 of file RelAlgExecutor.cpp.

References fold_expr(), RelCompound::getFilterExpr(), qual_to_conjunctive_form(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

1240  {
1241  const auto filter_rex = compound->getFilterExpr();
1242  const auto filter_expr =
1243  filter_rex ? translator.translateScalarRex(filter_rex) : nullptr;
 1244  return filter_expr ? qual_to_conjunctive_form(fold_expr(filter_expr.get()))
 1245                     : QualsConjunctiveForm{};
 1246 }
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
const RexScalar * getFilterExpr() const
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ translate_scalar_sources()

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources ( const RA *  ra_node,
const RelAlgTranslator translator,
const ::ExecutorType  executor_type 
)

Definition at line 1153 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), fold_expr(), get_scalar_sources_size(), Native, rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), RelAlgTranslator::translateScalarRex(), and VLOG.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::createTableFunctionWorkUnit().

1156  {
1157  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1158  const size_t scalar_sources_size = get_scalar_sources_size(ra_node);
1159  VLOG(3) << "get_scalar_sources_size(" << ra_node->toString()
1160  << ") = " << scalar_sources_size;
1161  for (size_t i = 0; i < scalar_sources_size; ++i) {
1162  const auto scalar_rex = scalar_at(i, ra_node);
1163  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1164  // RexRef are synthetic scalars we append at the end of the real ones
1165  // for the sake of taking memory ownership, no real work needed here.
1166  continue;
1167  }
1168 
1169  const auto scalar_expr =
1170  rewrite_array_elements(translator.translateScalarRex(scalar_rex).get());
1171  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1172  if (executor_type == ExecutorType::Native) {
1173  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1174  } else {
1175  scalar_sources.push_back(cast_dict_to_none(fold_expr(rewritten_expr.get())));
1176  }
1177  }
1178 
1179  return scalar_sources;
1180 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
const RexScalar * scalar_at(const size_t i, const RelTableFunction *table_func)
size_t get_scalar_sources_size(const RelTableFunction *table_func)
#define VLOG(n)
Definition: Logger.h:291
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ translate_scalar_sources_for_update()

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources_for_update ( const RA *  ra_node,
const RelAlgTranslator translator,
int32_t  tableId,
const Catalog_Namespace::Catalog cat,
const ColumnNameList colNames,
size_t  starting_projection_column_idx 
)

Definition at line 1183 of file RelAlgExecutor.cpp.

References cat(), get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

1189  {
1190  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1191  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
1192  const auto scalar_rex = scalar_at(i, ra_node);
1193  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1194  // RexRef are synthetic scalars we append at the end of the real ones
1195  // for the sake of taking memory ownership, no real work needed here.
1196  continue;
1197  }
1198 
1199  std::shared_ptr<Analyzer::Expr> translated_expr;
1200  if (i >= starting_projection_column_idx && i < get_scalar_sources_size(ra_node) - 1) {
1201  translated_expr = cast_to_column_type(translator.translateScalarRex(scalar_rex),
1202  tableId,
1203  cat,
1204  colNames[i - starting_projection_column_idx]);
1205  } else {
1206  translated_expr = translator.translateScalarRex(scalar_rex);
1207  }
1208  const auto scalar_expr = rewrite_array_elements(translated_expr.get());
1209  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1210  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1211  }
1212 
1213  return scalar_sources;
1214 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::string cat(Ts &&... args)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
const RexScalar * scalar_at(const size_t i, const RelTableFunction *table_func)
size_t get_scalar_sources_size(const RelTableFunction *table_func)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
+ Here is the call graph for this function:

◆ translate_targets() [1/2]

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound compound,
const RelAlgTranslator translator,
const ExecutorType  executor_type 
)

Definition at line 1248 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), CHECK, CHECK_GE, CHECK_LE, fold_expr(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, Native, rewrite_expr(), set_transient_dict(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

1254  {
1255  std::vector<Analyzer::Expr*> target_exprs;
1256  for (size_t i = 0; i < compound->size(); ++i) {
1257  const auto target_rex = compound->getTargetExpr(i);
1258  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
1259  std::shared_ptr<Analyzer::Expr> target_expr;
1260  if (target_rex_agg) {
1261  target_expr =
1262  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
1263  } else {
1264  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
1265  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
1266  if (target_rex_ref) {
1267  const auto ref_idx = target_rex_ref->getIndex();
1268  CHECK_GE(ref_idx, size_t(1));
1269  CHECK_LE(ref_idx, groupby_exprs.size());
1270  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
1271  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
1272  } else {
1273  target_expr = translator.translateScalarRex(target_rex_scalar);
1274  auto rewritten_expr = rewrite_expr(target_expr.get());
1275  target_expr = fold_expr(rewritten_expr.get());
1276  if (executor_type == ExecutorType::Native) {
1277  try {
1278  target_expr = set_transient_dict(target_expr);
1279  } catch (...) {
1280  // noop
1281  }
1282  } else {
1283  target_expr = cast_dict_to_none(target_expr);
1284  }
1285  }
1286  }
1287  CHECK(target_expr);
1288  target_exprs_owned.push_back(target_expr);
1289  target_exprs.push_back(target_expr.get());
1290  }
1291  return target_exprs;
1292 }
size_t size() const override
const Rex * getTargetExpr(const size_t i) const
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1669
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
size_t getIndex() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
#define CHECK_LE(x, y)
Definition: Logger.h:208
#define CHECK(condition)
Definition: Logger.h:197
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
+ Here is the call graph for this function:

◆ translate_targets() [2/2]

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelAggregate aggregate,
const RelAlgTranslator translator 
)

Definition at line 1294 of file RelAlgExecutor.cpp.

References CHECK, fold_expr(), RelAggregate::getAggExprs(), Analyzer::Var::kGROUPBY, RelAlgTranslator::translateAggregateRex(), and var_ref().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1299  {
1300  std::vector<Analyzer::Expr*> target_exprs;
1301  size_t group_key_idx = 1;
1302  for (const auto& groupby_expr : groupby_exprs) {
1303  auto target_expr =
1304  var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, group_key_idx++);
1305  target_exprs_owned.push_back(target_expr);
1306  target_exprs.push_back(target_expr.get());
1307  }
1308 
1309  for (const auto& target_rex_agg : aggregate->getAggExprs()) {
1310  auto target_expr =
1311  RelAlgTranslator::translateAggregateRex(target_rex_agg.get(), scalar_sources);
1312  CHECK(target_expr);
1313  target_expr = fold_expr(target_expr.get());
1314  target_exprs_owned.push_back(target_expr);
1315  target_exprs.push_back(target_expr.get());
1316  }
1317  return target_exprs;
1318 }
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1669
#define CHECK(condition)
Definition: Logger.h:197
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
+ Here is the call graph for this function:
+ Here is the caller graph for this function: