OmniSciDB  06b3bd477c
anonymous_namespace{RelAlgExecutor.cpp} Namespace Reference

Classes

struct  ExecutorMutexHolder
 
class  RexUsedInputsVisitor
 

Functions

bool node_is_aggregate (const RelAlgNode *ra)
 
std::unordered_set< PhysicalInput > get_physical_inputs (const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
 
void check_sort_node_source_constraint (const RelSort *sort)
 
const RelAlgNode * get_data_sink (const RelAlgNode *ra_node)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelAggregate *aggregate, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelProject *project, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelTableFunction *table_func, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelFilter *filter, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs (const RelLogicalUnion *logical_union, const Catalog_Namespace::Catalog &)
 
int table_id_from_ra (const RelAlgNode *ra_node)
 
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels (const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
 
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs (const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
 
void collect_used_input_desc (std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
template<class RA >
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl (const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
template<class RA >
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc (const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
size_t get_scalar_sources_size (const RelCompound *compound)
 
size_t get_scalar_sources_size (const RelProject *project)
 
size_t get_scalar_sources_size (const RelTableFunction *table_func)
 
const RexScalar * scalar_at (const size_t i, const RelCompound *compound)
 
const RexScalar * scalar_at (const size_t i, const RelProject *project)
 
const RexScalar * scalar_at (const size_t i, const RelTableFunction *table_func)
 
std::shared_ptr< Analyzer::Expr > set_transient_dict (const std::shared_ptr< Analyzer::Expr > expr)
 
void set_transient_dict_maybe (std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
 
std::shared_ptr< Analyzer::Expr > cast_dict_to_none (const std::shared_ptr< Analyzer::Expr > &input)
 
template<class RA >
std::vector< std::shared_ptr< Analyzer::Expr > > translate_scalar_sources (const RA *ra_node, const RelAlgTranslator &translator, const ::ExecutorType executor_type)
 
template<class RA >
std::vector< std::shared_ptr< Analyzer::Expr > > translate_scalar_sources_for_update (const RA *ra_node, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
std::list< std::shared_ptr< Analyzer::Expr > > translate_groupby_exprs (const RelCompound *compound, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
std::list< std::shared_ptr< Analyzer::Expr > > translate_groupby_exprs (const RelAggregate *aggregate, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
QualsConjunctiveForm translate_quals (const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator, const ExecutorType executor_type)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelAggregate *aggregate, const RelAlgTranslator &translator)
 
bool is_count_distinct (const Analyzer::Expr *expr)
 
bool is_agg (const Analyzer::Expr *expr)
 
SQLTypeInfo get_logical_type_for_expr (const Analyzer::Expr &expr)
 
template<class RA >
std::vector< TargetMetaInfo > get_targets_meta (const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
 
template<>
std::vector< TargetMetaInfo > get_targets_meta (const RelFilter *filter, const std::vector< Analyzer::Expr * > &target_exprs)
 
bool is_window_execution_unit (const RelAlgExecutionUnit &ra_exe_unit)
 
std::shared_ptr< Analyzer::Expr > transform_to_inner (const Analyzer::Expr *expr)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const ColumnDescriptor *cd, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
std::list< Analyzer::OrderEntry > get_order_entries (const RelSort *sort)
 
size_t get_scan_limit (const RelAlgNode *ra, const size_t limit)
 
bool first_oe_is_desc (const std::list< Analyzer::OrderEntry > &order_entries)
 
size_t groups_approx_upper_bound (const std::vector< InputTableInfo > &table_infos)
 
bool compute_output_buffer_size (const RelAlgExecutionUnit &ra_exe_unit)
 
bool exe_unit_has_quals (const RelAlgExecutionUnit ra_exe_unit)
 
RelAlgExecutionUnit decide_approx_count_distinct_implementation (const RelAlgExecutionUnit &ra_exe_unit_in, const std::vector< InputTableInfo > &table_infos, const Executor *executor, const ExecutorDeviceType device_type_in, std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned)
 
void build_render_targets (RenderInfo &render_info, const std::vector< Analyzer::Expr * > &work_unit_target_exprs, const std::vector< TargetMetaInfo > &targets_meta)
 
bool can_use_bump_allocator (const RelAlgExecutionUnit &ra_exe_unit, const CompilationOptions &co, const ExecutionOptions &eo)
 
JoinType get_join_type (const RelAlgNode *ra)
 
std::unique_ptr< const RexOperator > get_bitwise_equals (const RexScalar *scalar)
 
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction (const RexScalar *scalar)
 
std::vector< JoinType > left_deep_join_types (const RelLeftDeepInnerJoin *left_deep_join)
 
template<class RA >
std::vector< size_t > do_table_reordering (std::vector< InputDescriptor > &input_descs, std::list< std::shared_ptr< const InputColDescriptor >> &input_col_descs, const JoinQualsPerNestingLevel &left_deep_join_quals, std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const RA *node, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::vector< size_t > get_left_deep_join_input_sizes (const RelLeftDeepInnerJoin *left_deep_join)
 
std::list< std::shared_ptr< Analyzer::Expr > > rewrite_quals (const std::list< std::shared_ptr< Analyzer::Expr >> &quals)
 
std::vector< const RexScalar * > rex_to_conjunctive_form (const RexScalar *qual_expr)
 
std::shared_ptr< Analyzer::Expr > build_logical_expression (const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
 
template<class QualsList >
bool list_contains_expression (const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
 
std::shared_ptr< Analyzer::Expr > reverse_logical_distribution (const std::shared_ptr< Analyzer::Expr > &expr)
 
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs (const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
std::vector< std::shared_ptr< Analyzer::Expr > > target_exprs_for_union (RelAlgNode const *input_node)
 
std::pair< std::vector< TargetMetaInfo >, std::vector< std::shared_ptr< Analyzer::Expr > > > get_inputs_meta (const RelFilter *filter, const RelAlgTranslator &translator, const std::vector< std::shared_ptr< RexInput >> &inputs_owned, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 

Function Documentation

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::build_logical_expression ( const std::vector< std::shared_ptr< Analyzer::Expr >> &  factors,
const SQLOps  sql_op 
)

Definition at line 3355 of file RelAlgExecutor.cpp.

References CHECK(), kONE, and Parser::OperExpr::normalize().

Referenced by reverse_logical_distribution().

3357  {
3358  CHECK(!factors.empty());
3359  auto acc = factors.front();
3360  for (size_t i = 1; i < factors.size(); ++i) {
3361  acc = Parser::OperExpr::normalize(sql_op, kONE, acc, factors[i]);
3362  }
3363  return acc;
3364 }
CHECK(cgen_state)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:272
Definition: sqldefs.h:69
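A minimal usage sketch (lhs, rhs and extra_qual are illustrative std::shared_ptr<Analyzer::Expr> quals, not names from the source); it reproduces the left-deep normalize() chain built by the loop above:

std::vector<std::shared_ptr<Analyzer::Expr>> factors{lhs, rhs, extra_qual};
auto conjunction = build_logical_expression(factors, kAND);
// Equivalent to:
//   Parser::OperExpr::normalize(kAND, kONE,
//       Parser::OperExpr::normalize(kAND, kONE, lhs, rhs), extra_qual)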


void anonymous_namespace{RelAlgExecutor.cpp}::build_render_targets ( RenderInfo &  render_info,
const std::vector< Analyzer::Expr * > &  work_unit_target_exprs,
const std::vector< TargetMetaInfo > &  targets_meta 
)

Definition at line 2632 of file RelAlgExecutor.cpp.

References CHECK_EQ, and RenderInfo::targets.

Referenced by RelAlgExecutor::executeWorkUnit().

2634  {
2635  CHECK_EQ(work_unit_target_exprs.size(), targets_meta.size());
2636  render_info.targets.clear();
2637  for (size_t i = 0; i < targets_meta.size(); ++i) {
2638  render_info.targets.emplace_back(std::make_shared<Analyzer::TargetEntry>(
2639  targets_meta[i].get_resname(),
2640  work_unit_target_exprs[i]->get_shared_ptr(),
2641  false));
2642  }
2643 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::vector< std::shared_ptr< Analyzer::TargetEntry > > targets
Definition: RenderInfo.h:37


bool anonymous_namespace{RelAlgExecutor.cpp}::can_use_bump_allocator ( const RelAlgExecutionUnit &  ra_exe_unit,
const CompilationOptions &  co,
const ExecutionOptions &  eo 
)
inline

Definition at line 2645 of file RelAlgExecutor.cpp.

References CompilationOptions::device_type, g_enable_bump_allocator, GPU, SortInfo::order_entries, ExecutionOptions::output_columnar_hint, and RelAlgExecutionUnit::sort_info.

Referenced by RelAlgExecutor::executeWorkUnit().

2647  {
2648  return g_enable_bump_allocator && co.device_type == ExecutorDeviceType::GPU &&
2649  !eo.output_columnar_hint && ra_exe_unit.sort_info.order_entries.empty();
2650 }
const std::list< Analyzer::OrderEntry > order_entries
const SortInfo sort_info
ExecutorDeviceType device_type
bool g_enable_bump_allocator
Definition: Execute.cpp:104


std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::cast_dict_to_none ( const std::shared_ptr< Analyzer::Expr > &  input)

Definition at line 1119 of file RelAlgExecutor.cpp.

References kENCODING_DICT, and kTEXT.

Referenced by translate_scalar_sources(), and translate_targets().

1120  {
1121  const auto& input_ti = input->get_type_info();
1122  if (input_ti.is_string() && input_ti.get_compression() == kENCODING_DICT) {
1123  return input->add_cast(SQLTypeInfo(kTEXT, input_ti.get_notnull()));
1124  }
1125  return input;
1126 }
Definition: sqltypes.h:53
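A brief usage sketch (str_expr is an assumed std::shared_ptr<Analyzer::Expr> over a dictionary-encoded TEXT column): the helper appends a cast to none-encoded TEXT so the value is materialized as a transient string; any non-string or already none-encoded input is returned unchanged.

auto none_encoded = cast_dict_to_none(str_expr);
// For a dict-encoded string this is str_expr->add_cast(SQLTypeInfo(kTEXT, notnull));
// for every other type, none_encoded == str_expr.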


void anonymous_namespace{RelAlgExecutor.cpp}::check_sort_node_source_constraint ( const RelSort *  sort)
inline

Definition at line 402 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

402  {
403  CHECK_EQ(size_t(1), sort->inputCount());
404  const auto source = sort->getInput(0);
405  if (dynamic_cast<const RelSort*>(source)) {
406  throw std::runtime_error("Sort node not supported as input to another sort");
407  }
408 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const


void anonymous_namespace{RelAlgExecutor.cpp}::collect_used_input_desc ( std::vector< InputDescriptor > &  input_descs,
const Catalog_Namespace::Catalog &  cat,
std::unordered_set< std::shared_ptr< const InputColDescriptor >> &  input_col_descs_unique,
const RelAlgNode *  ra_node,
const std::unordered_set< const RexInput * > &  source_used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 967 of file RelAlgExecutor.cpp.

References Catalog_Namespace::Catalog::getColumnIdBySpi(), table_id_from_ra(), RelAlgNode::toString(), and VLOG.

Referenced by get_input_desc_impl().

973  {
974  VLOG(3) << "ra_node=" << ra_node->toString()
975  << " input_col_descs_unique.size()=" << input_col_descs_unique.size()
976  << " source_used_inputs.size()=" << source_used_inputs.size();
977  for (const auto used_input : source_used_inputs) {
978  const auto input_ra = used_input->getSourceNode();
979  const int table_id = table_id_from_ra(input_ra);
980  const auto col_id = used_input->getIndex();
981  auto it = input_to_nest_level.find(input_ra);
982  if (it != input_to_nest_level.end()) {
983  const int input_desc = it->second;
984  input_col_descs_unique.insert(std::make_shared<const InputColDescriptor>(
985  dynamic_cast<const RelScan*>(input_ra)
986  ? cat.getColumnIdBySpi(table_id, col_id + 1)
987  : col_id,
988  table_id,
989  input_desc));
990  } else if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
991  throw std::runtime_error("Bushy joins not supported");
992  }
993  }
994 }
int table_id_from_ra(const RelAlgNode *ra_node)
virtual std::string toString() const =0
#define VLOG(n)
Definition: Logger.h:291
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1540


bool anonymous_namespace{RelAlgExecutor.cpp}::compute_output_buffer_size ( const RelAlgExecutionUnit &  ra_exe_unit)

Determines whether a query needs to compute the size of its output buffer before execution. Returns true for projection queries with no LIMIT, or with a LIMIT that exceeds the high scan limit threshold (meaning it would be cheaper to compute the number of passing rows, or to use the bump allocator, than to allocate the full scan limit per GPU).

Definition at line 2544 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::groupby_exprs, Executor::high_scan_limit, RelAlgExecutionUnit::scan_limit, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

2544  {
2545  for (const auto target_expr : ra_exe_unit.target_exprs) {
2546  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
2547  return false;
2548  }
2549  }
2550  if (ra_exe_unit.groupby_exprs.size() == 1 && !ra_exe_unit.groupby_exprs.front() &&
2551  (!ra_exe_unit.scan_limit || ra_exe_unit.scan_limit > Executor::high_scan_limit)) {
2552  return true;
2553  }
2554  return false;
2555 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
static const size_t high_scan_limit
Definition: Execute.h:374
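The query shapes below are illustrative assumptions showing how the checks above combine; only a bare projection without a usable LIMIT makes the function return true.

// SELECT x, y FROM t;            -- no aggregate targets, scan_limit == 0     -> true
// SELECT x FROM t LIMIT 10;      -- LIMIT below Executor::high_scan_limit     -> false
// SELECT COUNT(*) FROM t;        -- aggregate target expression               -> false
// SELECT x FROM t GROUP BY x;    -- non-trivial groupby_exprs                 -> false
const bool needs_output_size = compute_output_buffer_size(ra_exe_unit);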


RelAlgExecutionUnit anonymous_namespace{RelAlgExecutor.cpp}::decide_approx_count_distinct_implementation ( const RelAlgExecutionUnit &  ra_exe_unit_in,
const std::vector< InputTableInfo > &  table_infos,
const Executor *  executor,
const ExecutorDeviceType  device_type_in,
std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned 
)

Definition at line 2562 of file RelAlgExecutor.cpp.

References Bitmap, CHECK(), CHECK_GE, g_bigint_count, g_cluster, g_hll_precision_bits, get_agg_type(), get_count_distinct_sub_bitmap_count(), get_target_info(), getExpressionRange(), GPU, hll_size_for_rate(), Integer, kAPPROX_COUNT_DISTINCT, kCOUNT, kENCODING_DICT, kINT, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit(), and RelAlgExecutor::handleOutOfMemoryRetry().

2567  {
2568  RelAlgExecutionUnit ra_exe_unit = ra_exe_unit_in;
2569  for (size_t i = 0; i < ra_exe_unit.target_exprs.size(); ++i) {
2570  const auto target_expr = ra_exe_unit.target_exprs[i];
2571  const auto agg_info = get_target_info(target_expr, g_bigint_count);
2572  if (agg_info.agg_kind != kAPPROX_COUNT_DISTINCT) {
2573  continue;
2574  }
2575  CHECK(dynamic_cast<const Analyzer::AggExpr*>(target_expr));
2576  const auto arg = static_cast<Analyzer::AggExpr*>(target_expr)->get_own_arg();
2577  CHECK(arg);
2578  const auto& arg_ti = arg->get_type_info();
2579  // Avoid calling getExpressionRange for variable length types (string and array),
2580  // it'd trigger an assertion since that API expects to be called only for types
2581  // for which the notion of range is well-defined. A bit of a kludge, but the
2582  // logic to reject these types anyway is at lower levels in the stack and not
2583  // really worth pulling into a separate function for now.
2584  if (!(arg_ti.is_number() || arg_ti.is_boolean() || arg_ti.is_time() ||
2585  (arg_ti.is_string() && arg_ti.get_compression() == kENCODING_DICT))) {
2586  continue;
2587  }
2588  const auto arg_range = getExpressionRange(arg.get(), table_infos, executor);
2589  if (arg_range.getType() != ExpressionRangeType::Integer) {
2590  continue;
2591  }
2592  // When running distributed, the threshold for using the precise implementation
2593  // must be consistent across all leaves, otherwise we could have a mix of precise
2594  // and approximate bitmaps and we cannot aggregate them.
2595  const auto device_type = g_cluster ? ExecutorDeviceType::GPU : device_type_in;
2596  const auto bitmap_sz_bits = arg_range.getIntMax() - arg_range.getIntMin() + 1;
2597  const auto sub_bitmap_count =
2598  get_count_distinct_sub_bitmap_count(bitmap_sz_bits, ra_exe_unit, device_type);
2599  int64_t approx_bitmap_sz_bits{0};
2600  const auto error_rate =
2601  static_cast<Analyzer::AggExpr*>(target_expr)->get_error_rate();
2602  if (error_rate) {
2603  CHECK(error_rate->get_type_info().get_type() == kINT);
2604  CHECK_GE(error_rate->get_constval().intval, 1);
2605  approx_bitmap_sz_bits = hll_size_for_rate(error_rate->get_constval().intval);
2606  } else {
2607  approx_bitmap_sz_bits = g_hll_precision_bits;
2608  }
2609  CountDistinctDescriptor approx_count_distinct_desc{CountDistinctImplType::Bitmap,
2610  arg_range.getIntMin(),
2611  approx_bitmap_sz_bits,
2612  true,
2613  device_type,
2614  sub_bitmap_count};
2615  CountDistinctDescriptor precise_count_distinct_desc{CountDistinctImplType::Bitmap,
2616  arg_range.getIntMin(),
2617  bitmap_sz_bits,
2618  false,
2619  device_type,
2620  sub_bitmap_count};
2621  if (approx_count_distinct_desc.bitmapPaddedSizeBytes() >=
2622  precise_count_distinct_desc.bitmapPaddedSizeBytes()) {
2623  auto precise_count_distinct = makeExpr<Analyzer::AggExpr>(
2624  get_agg_type(kCOUNT, arg.get()), kCOUNT, arg, true, nullptr);
2625  target_exprs_owned.push_back(precise_count_distinct);
2626  ra_exe_unit.target_exprs[i] = precise_count_distinct.get();
2627  }
2628  }
2629  return ra_exe_unit;
2630 }
std::vector< Analyzer::Expr * > target_exprs
int hll_size_for_rate(const int err_percent)
Definition: HyperLogLog.h:115
TargetInfo get_target_info(const PointerType target_expr, const bool bigint_count)
Definition: TargetInfo.h:78
#define CHECK_GE(x, y)
Definition: Logger.h:210
SQLTypeInfo get_agg_type(const SQLAgg agg_kind, const Analyzer::Expr *arg_expr)
int g_hll_precision_bits
size_t get_count_distinct_sub_bitmap_count(const size_t bitmap_sz_bits, const RelAlgExecutionUnit &ra_exe_unit, const ExecutorDeviceType device_type)
CHECK(cgen_state)
bool g_bigint_count
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
Definition: sqldefs.h:76
bool g_cluster
Definition: sqltypes.h:46
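A worked example with assumed numbers (not from the source): for APPROX_COUNT_DISTINCT(x) where the integer range of x spans 1,000 values, the precise bitmap needs about 1,000 bits per group, while the approximate descriptor needs 2^approx_bitmap_sz_bits registers (taken from the explicit error rate via hll_size_for_rate(), or g_hll_precision_bits by default). Since the approximate bitmap would be at least as large here, the loop above swaps in an exact COUNT(DISTINCT x), so the cheaper descriptor always wins and accuracy can only improve. A sketch of the call, roughly as the executor might issue it (surrounding variable names are assumptions):

const auto adjusted_exe_unit = decide_approx_count_distinct_implementation(
    work_unit.exe_unit, table_infos, executor, co.device_type, target_exprs_owned);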


template<class RA >
std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::do_table_reordering ( std::vector< InputDescriptor > &  input_descs,
std::list< std::shared_ptr< const InputColDescriptor >> &  input_col_descs,
const JoinQualsPerNestingLevel &  left_deep_join_quals,
std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const RA *  node,
const std::vector< InputTableInfo > &  query_infos,
const Executor *  executor 
)

Definition at line 3201 of file RelAlgExecutor.cpp.

References cat(), CHECK(), g_cluster, get_input_desc(), get_input_nest_levels(), get_node_input_permutation(), and table_is_replicated().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3208  {
3209  if (g_cluster) {
3210  // Disable table reordering in distributed mode. The aggregator does not have enough
3211  // information to break ties
3212  return {};
3213  }
3214  const auto& cat = *executor->getCatalog();
3215  for (const auto& table_info : query_infos) {
3216  if (table_info.table_id < 0) {
3217  continue;
3218  }
3219  const auto td = cat.getMetadataForTable(table_info.table_id);
3220  CHECK(td);
3221  if (table_is_replicated(td)) {
3222  return {};
3223  }
3224  }
3225  const auto input_permutation =
3226  get_node_input_permutation(left_deep_join_quals, query_infos, executor);
3227  input_to_nest_level = get_input_nest_levels(node, input_permutation);
3228  std::tie(input_descs, input_col_descs, std::ignore) =
3229  get_input_desc(node, input_to_nest_level, input_permutation, cat);
3230  return input_permutation;
3231 }
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels(const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
std::string cat(Ts &&...args)
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
CHECK(cgen_state)
bool table_is_replicated(const TableDescriptor *td)
bool g_cluster
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc(const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
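A sketch of the calling pattern (based on the body above; the surrounding variable names are assumptions): the function rewrites input_descs, input_col_descs and input_to_nest_level in place, and the returned permutation is empty when reordering is skipped (distributed mode or a replicated table).

const auto input_permutation = do_table_reordering(input_descs,
                                                   input_col_descs,
                                                   left_deep_join_quals,
                                                   input_to_nest_level,
                                                   compound,
                                                   query_infos,
                                                   executor);
if (!input_permutation.empty()) {
  // Join inputs were reordered; anything derived from the old ordering must be
  // rebuilt from the refreshed input_to_nest_level / input_descs.
}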


bool anonymous_namespace{RelAlgExecutor.cpp}::exe_unit_has_quals ( const RelAlgExecutionUnit  ra_exe_unit)
inline

Definition at line 2557 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::join_quals, RelAlgExecutionUnit::quals, and RelAlgExecutionUnit::simple_quals.

Referenced by RelAlgExecutor::executeWorkUnit().

2557  {
2558  return !(ra_exe_unit.quals.empty() && ra_exe_unit.join_quals.empty() &&
2559  ra_exe_unit.simple_quals.empty());
2560 }
const JoinQualsPerNestingLevel join_quals
std::list< std::shared_ptr< Analyzer::Expr > > quals
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals


bool anonymous_namespace{RelAlgExecutor.cpp}::first_oe_is_desc ( const std::list< Analyzer::OrderEntry > &  order_entries)

Definition at line 2329 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2329  {
2330  return !order_entries.empty() && order_entries.front().is_desc;
2331 }


std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals ( const RexScalar *  scalar)

Definition at line 3120 of file RelAlgExecutor.cpp.

References CHECK_EQ, kAND, kBW_EQ, kEQ, kISNULL, kOR, and RexVisitorBase< T >::visit().

Referenced by get_bitwise_equals_conjunction().

3120  {
3121  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3122  if (!condition || condition->getOperator() != kOR || condition->size() != 2) {
3123  return nullptr;
3124  }
3125  const auto equi_join_condition =
3126  dynamic_cast<const RexOperator*>(condition->getOperand(0));
3127  if (!equi_join_condition || equi_join_condition->getOperator() != kEQ) {
3128  return nullptr;
3129  }
3130  const auto both_are_null_condition =
3131  dynamic_cast<const RexOperator*>(condition->getOperand(1));
3132  if (!both_are_null_condition || both_are_null_condition->getOperator() != kAND ||
3133  both_are_null_condition->size() != 2) {
3134  return nullptr;
3135  }
3136  const auto lhs_is_null =
3137  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(0));
3138  const auto rhs_is_null =
3139  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(1));
3140  if (!lhs_is_null || !rhs_is_null || lhs_is_null->getOperator() != kISNULL ||
3141  rhs_is_null->getOperator() != kISNULL) {
3142  return nullptr;
3143  }
3144  CHECK_EQ(size_t(1), lhs_is_null->size());
3145  CHECK_EQ(size_t(1), rhs_is_null->size());
3146  CHECK_EQ(size_t(2), equi_join_condition->size());
3147  const auto eq_lhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(0));
3148  const auto eq_rhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(1));
3149  const auto is_null_lhs = dynamic_cast<const RexInput*>(lhs_is_null->getOperand(0));
3150  const auto is_null_rhs = dynamic_cast<const RexInput*>(rhs_is_null->getOperand(0));
3151  if (!eq_lhs || !eq_rhs || !is_null_lhs || !is_null_rhs) {
3152  return nullptr;
3153  }
3154  std::vector<std::unique_ptr<const RexScalar>> eq_operands;
3155  if (*eq_lhs == *is_null_lhs && *eq_rhs == *is_null_rhs) {
3156  RexDeepCopyVisitor deep_copy_visitor;
3157  auto lhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(0));
3158  auto rhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(1));
3159  eq_operands.emplace_back(lhs_op_copy.release());
3160  eq_operands.emplace_back(rhs_op_copy.release());
3161  return boost::make_unique<const RexOperator>(
3162  kBW_EQ, eq_operands, equi_join_condition->getType());
3163  }
3164  return nullptr;
3165 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
Definition: sqldefs.h:38
Definition: sqldefs.h:30
virtual T visit(const RexScalar *rex_scalar) const
Definition: RexVisitor.h:27
Definition: sqldefs.h:37
Definition: sqldefs.h:31
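In SQL terms the detected pattern is the NULL-safe equality idiom. A small sketch (join_condition is an assumed const RexScalar* for an ON clause):

// a = b OR (a IS NULL AND b IS NULL)   ==>   a BW_EQ b
auto bw_eq = get_bitwise_equals(join_condition);
if (bw_eq) {
  // The whole disjunction can be replaced by the single null-safe kBW_EQ operator.
}
// Any other expression shape yields nullptr and the condition is left untouched.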


std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals_conjunction ( const RexScalar *  scalar)

Definition at line 3167 of file RelAlgExecutor.cpp.

References CHECK_GE, get_bitwise_equals(), and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3168  {
3169  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3170  if (condition && condition->getOperator() == kAND) {
3171  CHECK_GE(condition->size(), size_t(2));
3172  auto acc = get_bitwise_equals(condition->getOperand(0));
3173  if (!acc) {
3174  return nullptr;
3175  }
3176  for (size_t i = 1; i < condition->size(); ++i) {
3177  std::vector<std::unique_ptr<const RexScalar>> and_operands;
3178  and_operands.emplace_back(std::move(acc));
3179  and_operands.emplace_back(get_bitwise_equals_conjunction(condition->getOperand(i)));
3180  acc =
3181  boost::make_unique<const RexOperator>(kAND, and_operands, condition->getType());
3182  }
3183  return acc;
3184  }
3185  return get_bitwise_equals(scalar);
3186 }
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction(const RexScalar *scalar)
#define CHECK_GE(x, y)
Definition: Logger.h:210
Definition: sqldefs.h:37
std::unique_ptr< const RexOperator > get_bitwise_equals(const RexScalar *scalar)


const RelAlgNode* anonymous_namespace{RelAlgExecutor.cpp}::get_data_sink ( const RelAlgNode *  ra_node)

Definition at line 772 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), RelAlgNode::inputCount(), and join().

Referenced by get_input_desc_impl(), get_input_nest_levels(), get_inputs_meta(), get_join_source_used_inputs(), get_join_type(), and get_used_inputs().

772  {
773  if (auto join = dynamic_cast<const RelJoin*>(ra_node)) {
774  CHECK_EQ(size_t(2), join->inputCount());
775  return join;
776  }
777  if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
778  CHECK_EQ(size_t(1), ra_node->inputCount());
779  }
780  auto only_src = ra_node->getInput(0);
781  const bool is_join = dynamic_cast<const RelJoin*>(only_src) ||
782  dynamic_cast<const RelLeftDeepInnerJoin*>(only_src);
783  return is_join ? only_src : ra_node;
784 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const


template<class RA >
std::tuple<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> >, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc ( const RA *  ra_node,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 1058 of file RelAlgExecutor.cpp.

References get_input_desc_impl(), get_used_inputs(), and VLOG.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

1061  {
1062  std::unordered_set<const RexInput*> used_inputs;
1063  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
1064  std::tie(used_inputs, used_inputs_owned) = get_used_inputs(ra_node, cat);
1065  VLOG(3) << "used_inputs.size() = " << used_inputs.size();
1066  auto input_desc_pair = get_input_desc_impl(
1067  ra_node, used_inputs, input_to_nest_level, input_permutation, cat);
1068  return std::make_tuple(
1069  input_desc_pair.first, input_desc_pair.second, used_inputs_owned);
1070 }
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl(const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs(const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
#define VLOG(n)
Definition: Logger.h:291
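A usage sketch combining this helper with get_input_nest_levels(), roughly as the work-unit builders do (compound and cat are assumed to be the node being translated and the session catalog; the empty vectors mean no join reordering):

const auto input_to_nest_level = get_input_nest_levels(compound, {});
const auto [input_descs, input_col_descs, used_inputs_owned] =
    get_input_desc(compound, input_to_nest_level, {}, cat);
// input_descs:       one InputDescriptor per physical input, ordered by nest level
// input_col_descs:   deduplicated descriptors of the columns actually referenced
// used_inputs_owned: keeps the synthesized RexInput nodes alive for the work unit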


template<class RA >
std::pair<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc_impl ( const RA *  ra_node,
const std::unordered_set< const RexInput * > &  used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 999 of file RelAlgExecutor.cpp.

References collect_used_input_desc(), get_data_sink(), get_join_source_used_inputs(), InputDescriptor::getNestLevel(), and table_id_from_ra().

Referenced by get_input_desc().

1003  {
1004  std::vector<InputDescriptor> input_descs;
1005  const auto data_sink_node = get_data_sink(ra_node);
1006  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
1007  const auto input_node_idx =
1008  input_permutation.empty() ? input_idx : input_permutation[input_idx];
1009  auto input_ra = data_sink_node->getInput(input_node_idx);
1010  const int table_id = table_id_from_ra(input_ra);
1011  input_descs.emplace_back(table_id, input_idx);
1012  }
1013  std::sort(input_descs.begin(),
1014  input_descs.end(),
1015  [](const InputDescriptor& lhs, const InputDescriptor& rhs) {
1016  return lhs.getNestLevel() < rhs.getNestLevel();
1017  });
1018  std::unordered_set<std::shared_ptr<const InputColDescriptor>> input_col_descs_unique;
1019  collect_used_input_desc(input_descs,
1020  cat,
1021  input_col_descs_unique, // modified
1022  ra_node,
1023  used_inputs,
1024  input_to_nest_level);
1025  std::unordered_set<const RexInput*> join_source_used_inputs;
1026  std::vector<std::shared_ptr<RexInput>> join_source_used_inputs_owned;
1027  std::tie(join_source_used_inputs, join_source_used_inputs_owned) =
1028  get_join_source_used_inputs(ra_node, cat);
1029  collect_used_input_desc(input_descs,
1030  cat,
1031  input_col_descs_unique, // modified
1032  ra_node,
1033  join_source_used_inputs,
1034  input_to_nest_level);
1035  std::vector<std::shared_ptr<const InputColDescriptor>> input_col_descs(
1036  input_col_descs_unique.begin(), input_col_descs_unique.end());
1037 
1038  std::sort(input_col_descs.begin(),
1039  input_col_descs.end(),
1040  [](std::shared_ptr<const InputColDescriptor> const& lhs,
1041  std::shared_ptr<const InputColDescriptor> const& rhs) {
1042  return std::make_tuple(lhs->getScanDesc().getNestLevel(),
1043  lhs->getColId(),
1044  lhs->getScanDesc().getTableId()) <
1045  std::make_tuple(rhs->getScanDesc().getNestLevel(),
1046  rhs->getColId(),
1047  rhs->getScanDesc().getTableId());
1048  });
1049  return {input_descs,
1050  std::list<std::shared_ptr<const InputColDescriptor>>(input_col_descs.begin(),
1051  input_col_descs.end())};
1052 }
void collect_used_input_desc(std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs(const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
int getNestLevel() const


std::unordered_map<const RelAlgNode*, int> anonymous_namespace{RelAlgExecutor.cpp}::get_input_nest_levels ( const RelAlgNode *  ra_node,
const std::vector< size_t > &  input_permutation 
)

Definition at line 906 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), logger::INFO, and LOG_IF.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

908  {
909  const auto data_sink_node = get_data_sink(ra_node);
910  std::unordered_map<const RelAlgNode*, int> input_to_nest_level;
911  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
912  const auto input_node_idx =
913  input_permutation.empty() ? input_idx : input_permutation[input_idx];
914  const auto input_ra = data_sink_node->getInput(input_node_idx);
915  // Having a non-zero mapped value (input_idx) results in the query being interpreted
916  // as a JOIN within CodeGenerator::codegenColVar() due to rte_idx being set to the
917  // mapped value (input_idx) which originates here. This would be incorrect for UNION.
918  size_t const idx = dynamic_cast<const RelLogicalUnion*>(ra_node) ? 0 : input_idx;
919  const auto it_ok = input_to_nest_level.emplace(input_ra, idx);
920  CHECK(it_ok.second);
921  LOG_IF(INFO, !input_permutation.empty())
922  << "Assigned input " << input_ra->toString() << " to nest level " << input_idx;
923  }
924  return input_to_nest_level;
925 }
#define LOG_IF(severity, condition)
Definition: Logger.h:287
CHECK(cgen_state)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)


std::pair<std::vector<TargetMetaInfo>, std::vector<std::shared_ptr<Analyzer::Expr> > > anonymous_namespace{RelAlgExecutor.cpp}::get_inputs_meta ( const RelFilter *  filter,
const RelAlgTranslator &  translator,
const std::vector< std::shared_ptr< RexInput >> &  inputs_owned,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3841 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), get_exprs_not_owned(), get_targets_meta(), synthesize_inputs(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createFilterWorkUnit().

3844  {
3845  std::vector<TargetMetaInfo> in_metainfo;
3846  std::vector<std::shared_ptr<Analyzer::Expr>> exprs_owned;
3847  const auto data_sink_node = get_data_sink(filter);
3848  auto input_it = inputs_owned.begin();
3849  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
3850  const auto source = data_sink_node->getInput(nest_level);
3851  const auto scan_source = dynamic_cast<const RelScan*>(source);
3852  if (scan_source) {
3853  CHECK(source->getOutputMetainfo().empty());
3854  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources_owned;
3855  for (size_t i = 0; i < scan_source->size(); ++i, ++input_it) {
3856  scalar_sources_owned.push_back(translator.translateScalarRex(input_it->get()));
3857  }
3858  const auto source_metadata =
3859  get_targets_meta(scan_source, get_exprs_not_owned(scalar_sources_owned));
3860  in_metainfo.insert(
3861  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3862  exprs_owned.insert(
3863  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3864  } else {
3865  const auto& source_metadata = source->getOutputMetainfo();
3866  input_it += source_metadata.size();
3867  in_metainfo.insert(
3868  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3869  const auto scalar_sources_owned = synthesize_inputs(
3870  data_sink_node, nest_level, source_metadata, input_to_nest_level);
3871  exprs_owned.insert(
3872  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3873  }
3874  }
3875  return std::make_pair(in_metainfo, exprs_owned);
3876 }
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs(const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
std::vector< Analyzer::Expr * > get_exprs_not_owned(const std::vector< std::shared_ptr< Analyzer::Expr >> &exprs)
Definition: Execute.h:192
CHECK(cgen_state)
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)


std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_join_source_used_inputs ( const RelAlgNode *  ra_node,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 928 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, CHECK_GT, get_data_sink(), anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelAlgNode::inputCount(), join(), run_benchmark_import::result, RelAlgNode::toString(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc_impl().

929  {
930  const auto data_sink_node = get_data_sink(ra_node);
931  if (auto join = dynamic_cast<const RelJoin*>(data_sink_node)) {
932  CHECK_EQ(join->inputCount(), 2u);
933  const auto condition = join->getCondition();
934  RexUsedInputsVisitor visitor(cat);
935  auto condition_inputs = visitor.visit(condition);
936  std::vector<std::shared_ptr<RexInput>> condition_inputs_owned(
937  visitor.get_inputs_owned());
938  return std::make_pair(condition_inputs, condition_inputs_owned);
939  }
940 
941  if (auto left_deep_join = dynamic_cast<const RelLeftDeepInnerJoin*>(data_sink_node)) {
942  CHECK_GE(left_deep_join->inputCount(), 2u);
943  const auto condition = left_deep_join->getInnerCondition();
944  RexUsedInputsVisitor visitor(cat);
945  auto result = visitor.visit(condition);
946  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
947  ++nesting_level) {
948  const auto outer_condition = left_deep_join->getOuterCondition(nesting_level);
949  if (outer_condition) {
950  const auto outer_result = visitor.visit(outer_condition);
951  result.insert(outer_result.begin(), outer_result.end());
952  }
953  }
954  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
955  return std::make_pair(result, used_inputs_owned);
956  }
957 
958  if (dynamic_cast<const RelLogicalUnion*>(ra_node)) {
959  CHECK_GT(ra_node->inputCount(), 1u) << ra_node->toString();
960  } else {
961  CHECK_EQ(ra_node->inputCount(), 1u) << ra_node->toString();
962  }
963  return std::make_pair(std::unordered_set<const RexInput*>{},
964  std::vector<std::shared_ptr<RexInput>>{});
965 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
#define CHECK_GE(x, y)
Definition: Logger.h:210
#define CHECK_GT(x, y)
Definition: Logger.h:209
virtual std::string toString() const =0
const size_t inputCount() const
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)


JoinType anonymous_namespace{RelAlgExecutor.cpp}::get_join_type ( const RelAlgNode *  ra)

Definition at line 3108 of file RelAlgExecutor.cpp.

References get_data_sink(), INNER, INVALID, and join().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3108  {
3109  auto sink = get_data_sink(ra);
3110  if (auto join = dynamic_cast<const RelJoin*>(sink)) {
3111  return join->getJoinType();
3112  }
3113  if (dynamic_cast<const RelLeftDeepInnerJoin*>(sink)) {
3114  return JoinType::INNER;
3115  }
3116 
3117  return JoinType::INVALID;
3118 }
std::string join(T const &container, std::string const &delim)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)


std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::get_left_deep_join_input_sizes ( const RelLeftDeepInnerJoin *  left_deep_join)

Definition at line 3233 of file RelAlgExecutor.cpp.

References get_node_output(), RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3234  {
3235  std::vector<size_t> input_sizes;
3236  for (size_t i = 0; i < left_deep_join->inputCount(); ++i) {
3237  const auto inputs = get_node_output(left_deep_join->getInput(i));
3238  input_sizes.push_back(inputs.size());
3239  }
3240  return input_sizes;
3241 }
const RelAlgNode * getInput(const size_t idx) const
RANodeOutput get_node_output(const RelAlgNode *ra_node)
const size_t inputCount() const


SQLTypeInfo anonymous_namespace{RelAlgExecutor.cpp}::get_logical_type_for_expr ( const Analyzer::Expr &  expr)
inline

Definition at line 1313 of file RelAlgExecutor.cpp.

References get_logical_type_info(), get_nullable_logical_type_info(), Analyzer::Expr::get_type_info(), is_agg(), is_count_distinct(), and kBIGINT.

Referenced by get_targets_meta().

1313  {
1314  if (is_count_distinct(&expr)) {
1315  return SQLTypeInfo(kBIGINT, false);
1316  } else if (is_agg(&expr)) {
1317  return get_nullable_logical_type_info(expr.get_type_info());
1318  }
1319  return get_logical_type_info(expr.get_type_info());
1320 }
bool is_agg(const Analyzer::Expr *expr)
SQLTypeInfo get_nullable_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:839
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:818
bool is_count_distinct(const Analyzer::Expr *expr)
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:79
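Illustrative mappings (the expressions are assumptions chosen to hit each branch above):

// COUNT(DISTINCT x)            -> SQLTypeInfo(kBIGINT, false)        (count-distinct fixup)
// SUM(y) or any other agg      -> get_nullable_logical_type_info(agg type)
// plain projected expression   -> get_logical_type_info(its type), i.e. the logical
//                                 type with any encoding stripped
const SQLTypeInfo logical_ti = get_logical_type_for_expr(*target_expr);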


std::list<Analyzer::OrderEntry> anonymous_namespace{RelAlgExecutor.cpp}::get_order_entries ( const RelSort *  sort)

Definition at line 2309 of file RelAlgExecutor.cpp.

References RelSort::collationCount(), Descending, First, RelSort::getCollation(), and run_benchmark_import::result.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2309  {
2310  std::list<Analyzer::OrderEntry> result;
2311  for (size_t i = 0; i < sort->collationCount(); ++i) {
2312  const auto sort_field = sort->getCollation(i);
2313  result.emplace_back(sort_field.getField() + 1,
2314  sort_field.getSortDir() == SortDirection::Descending,
2315  sort_field.getNullsPosition() == NullSortedPosition::First);
2316  }
2317  return result;
2318 }
SortField getCollation(const size_t i) const
size_t collationCount() const
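An illustrative mapping (the collation values are assumptions): a sort field over output column index 1, descending, nulls first becomes an order entry on the second target; note the getField() + 1, since OrderEntry target indexes are 1-based.

// SortField{field = 1, SortDirection::Descending, NullSortedPosition::First}
//   -> Analyzer::OrderEntry{tle_no = 2, is_desc = true, nulls_first = true}
const auto order_entries = get_order_entries(sort);  // sort: const RelSort*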


std::unordered_set<PhysicalInput> anonymous_namespace{RelAlgExecutor.cpp}::get_physical_inputs ( const Catalog_Namespace::Catalog &  cat,
const RelAlgNode *  ra 
)

Definition at line 59 of file RelAlgExecutor.cpp.

References get_physical_inputs(), and Catalog_Namespace::Catalog::getColumnIdBySpi().

61  {
62  auto phys_inputs = get_physical_inputs(ra);
63  std::unordered_set<PhysicalInput> phys_inputs2;
64  for (auto& phi : phys_inputs) {
65  phys_inputs2.insert(
66  PhysicalInput{cat.getColumnIdBySpi(phi.table_id, phi.col_id), phi.table_id});
67  }
68  return phys_inputs2;
69 }
std::unordered_set< PhysicalInput > get_physical_inputs(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1540


size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelCompound *  compound)

Definition at line 1072 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSourcesSize().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1072  {
1073  return compound->getScalarSourcesSize();
1074 }
const size_t getScalarSourcesSize() const


size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelProject *  project)

Definition at line 1076 of file RelAlgExecutor.cpp.

References RelProject::size().

1076  {
1077  return project->size();
1078 }
size_t size() const override


size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelTableFunction *  table_func)

Definition at line 1080 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputsSize().

1080  {
1081  return table_func->getTableFuncInputsSize();
1082 }
size_t getTableFuncInputsSize() const


size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scan_limit ( const RelAlgNode *  ra,
const size_t  limit 
)

Definition at line 2320 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit().

2320  {
2321  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
2322  if (aggregate) {
2323  return 0;
2324  }
2325  const auto compound = dynamic_cast<const RelCompound*>(ra);
2326  return (compound && compound->isAggregate()) ? 0 : limit;
2327 }


template<class RA >
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RA *  ra_node,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1323 of file RelAlgExecutor.cpp.

References CHECK(), and get_logical_type_for_expr().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), get_inputs_meta(), and get_targets_meta().

1325  {
1326  std::vector<TargetMetaInfo> targets_meta;
1327  for (size_t i = 0; i < ra_node->size(); ++i) {
1328  CHECK(target_exprs[i]);
1329  // TODO(alex): remove the count distinct type fixup.
1330  targets_meta.emplace_back(ra_node->getFieldName(i),
1331  get_logical_type_for_expr(*target_exprs[i]),
1332  target_exprs[i]->get_type_info());
1333  }
1334  return targets_meta;
1335 }
CHECK(cgen_state)
SQLTypeInfo get_logical_type_for_expr(const Analyzer::Expr &expr)
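A short usage sketch (project and translated_exprs are assumptions: the node being translated and its Analyzer target expressions, one per output column):

const auto targets_meta = get_targets_meta(project, translated_exprs);
// targets_meta[i] pairs the node's i-th field name with the logical type from
// get_logical_type_for_expr() and the expression's physical type; it is what the
// executor later reports as the result set's column metadata.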


template<>
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RelFilter *  filter,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1338 of file RelAlgExecutor.cpp.

References get_targets_meta(), RelAlgNode::getInput(), RelAlgNode::toString(), and UNREACHABLE.

1340  {
1341  RelAlgNode const* input0 = filter->getInput(0);
1342  if (auto const* input = dynamic_cast<RelCompound const*>(input0)) {
1343  return get_targets_meta(input, target_exprs);
1344  } else if (auto const* input = dynamic_cast<RelProject const*>(input0)) {
1345  return get_targets_meta(input, target_exprs);
1346  } else if (auto const* input = dynamic_cast<RelLogicalUnion const*>(input0)) {
1347  return get_targets_meta(input, target_exprs);
1348  } else if (auto const* input = dynamic_cast<RelAggregate const*>(input0)) {
1349  return get_targets_meta(input, target_exprs);
1350  } else if (auto const* input = dynamic_cast<RelScan const*>(input0)) {
1351  return get_targets_meta(input, target_exprs);
1352  }
1353  UNREACHABLE() << "Unhandled node type: " << input0->toString();
1354  return {};
1355 }
#define UNREACHABLE()
Definition: Logger.h:241
const RelAlgNode * getInput(const size_t idx) const
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
virtual std::string toString() const =0


std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelCompound *  compound,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 787 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelCompound::getFilterExpr(), RelCompound::getScalarSource(), RelCompound::getScalarSourcesSize(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc().

787  {
788  RexUsedInputsVisitor visitor(cat);
789  const auto filter_expr = compound->getFilterExpr();
790  std::unordered_set<const RexInput*> used_inputs =
791  filter_expr ? visitor.visit(filter_expr) : std::unordered_set<const RexInput*>{};
792  const auto sources_size = compound->getScalarSourcesSize();
793  for (size_t i = 0; i < sources_size; ++i) {
794  const auto source_inputs = visitor.visit(compound->getScalarSource(i));
795  used_inputs.insert(source_inputs.begin(), source_inputs.end());
796  }
797  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
798  return std::make_pair(used_inputs, used_inputs_owned);
799 }
const RexScalar * getFilterExpr() const
const size_t getScalarSourcesSize() const
const RexScalar * getScalarSource(const size_t i) const


std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelAggregate *  aggregate,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 802 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, RelAggregate::getAggExprs(), RelAggregate::getGroupByCount(), RelAlgNode::getInput(), RelAlgNode::getOutputMetainfo(), and RelAlgNode::inputCount().

802  {
803  CHECK_EQ(size_t(1), aggregate->inputCount());
804  std::unordered_set<const RexInput*> used_inputs;
805  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
806  const auto source = aggregate->getInput(0);
807  const auto& in_metainfo = source->getOutputMetainfo();
808  const auto group_count = aggregate->getGroupByCount();
809  CHECK_GE(in_metainfo.size(), group_count);
810  for (size_t i = 0; i < group_count; ++i) {
811  auto synthesized_used_input = new RexInput(source, i);
812  used_inputs_owned.emplace_back(synthesized_used_input);
813  used_inputs.insert(synthesized_used_input);
814  }
815  for (const auto& agg_expr : aggregate->getAggExprs()) {
816  for (size_t i = 0; i < agg_expr->size(); ++i) {
817  const auto operand_idx = agg_expr->getOperand(i);
818  CHECK_GE(in_metainfo.size(), static_cast<size_t>(operand_idx));
819  auto synthesized_used_input = new RexInput(source, operand_idx);
820  used_inputs_owned.emplace_back(synthesized_used_input);
821  used_inputs.insert(synthesized_used_input);
822  }
823  }
824  return std::make_pair(used_inputs, used_inputs_owned);
825 }
const size_t getGroupByCount() const
#define CHECK_EQ(x, y)
Definition: Logger.h:205
#define CHECK_GE(x, y)
Definition: Logger.h:210
const RelAlgNode * getInput(const size_t idx) const
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
const size_t inputCount() const
const std::vector< TargetMetaInfo > & getOutputMetainfo() const


std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelProject *  project,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 828 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelProject::getProjectAt(), RelProject::size(), and RexVisitorBase< T >::visit().

828  {
829  RexUsedInputsVisitor visitor(cat);
830  std::unordered_set<const RexInput*> used_inputs;
831  for (size_t i = 0; i < project->size(); ++i) {
832  const auto proj_inputs = visitor.visit(project->getProjectAt(i));
833  used_inputs.insert(proj_inputs.begin(), proj_inputs.end());
834  }
835  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
836  return std::make_pair(used_inputs, used_inputs_owned);
837 }

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelTableFunction *  table_func,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 840 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelTableFunction::getTableFuncInputAt(), RelTableFunction::getTableFuncInputsSize(), and RexVisitorBase< T >::visit().

841  {
842  RexUsedInputsVisitor visitor(cat);
843  std::unordered_set<const RexInput*> used_inputs;
844  for (size_t i = 0; i < table_func->getTableFuncInputsSize(); ++i) {
845  const auto table_func_inputs = visitor.visit(table_func->getTableFuncInputAt(i));
846  used_inputs.insert(table_func_inputs.begin(), table_func_inputs.end());
847  }
848  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
849  return std::make_pair(used_inputs, used_inputs_owned);
850 }

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelFilter *  filter,
const Catalog_Namespace::Catalog &  cat 
)

Definition at line 853 of file RelAlgExecutor.cpp.

References CHECK(), and get_data_sink().

853  {
854  std::unordered_set<const RexInput*> used_inputs;
855  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
856  const auto data_sink_node = get_data_sink(filter);
857  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
858  const auto source = data_sink_node->getInput(nest_level);
859  const auto scan_source = dynamic_cast<const RelScan*>(source);
860  if (scan_source) {
861  CHECK(source->getOutputMetainfo().empty());
862  for (size_t i = 0; i < scan_source->size(); ++i) {
863  auto synthesized_used_input = new RexInput(scan_source, i);
864  used_inputs_owned.emplace_back(synthesized_used_input);
865  used_inputs.insert(synthesized_used_input);
866  }
867  } else {
868  const auto& partial_in_metadata = source->getOutputMetainfo();
869  for (size_t i = 0; i < partial_in_metadata.size(); ++i) {
870  auto synthesized_used_input = new RexInput(source, i);
871  used_inputs_owned.emplace_back(synthesized_used_input);
872  used_inputs.insert(synthesized_used_input);
873  }
874  }
875  }
876  return std::make_pair(used_inputs, used_inputs_owned);
877 }

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelLogicalUnion *  logical_union,
const Catalog_Namespace::Catalog &  
)

Definition at line 880 of file RelAlgExecutor.cpp.

References RelAlgNode::getInput(), RelAlgNode::inputCount(), and VLOG.

880  {
881  std::unordered_set<const RexInput*> used_inputs(logical_union->inputCount());
882  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
883  used_inputs_owned.reserve(logical_union->inputCount());
884  VLOG(3) << "logical_union->inputCount()=" << logical_union->inputCount();
885  auto const n_inputs = logical_union->inputCount();
886  for (size_t nest_level = 0; nest_level < n_inputs; ++nest_level) {
887  auto input = logical_union->getInput(nest_level);
888  for (size_t i = 0; i < input->size(); ++i) {
889  used_inputs_owned.emplace_back(std::make_shared<RexInput>(input, i));
890  used_inputs.insert(used_inputs_owned.back().get());
891  }
892  }
893  return std::make_pair(std::move(used_inputs), std::move(used_inputs_owned));
894 }

size_t anonymous_namespace{RelAlgExecutor.cpp}::groups_approx_upper_bound ( const std::vector< InputTableInfo > &  table_infos)

Upper-bound estimate for the number of groups. It is neither strictly correct nor tight, but if the tables involved are really small we shouldn't waste time on the NDV estimation. Cross-joins and group-by on unnested arrays are not accounted for, which is why this estimate isn't entirely reliable.

Definition at line 2526 of file RelAlgExecutor.cpp.

References CHECK().

Referenced by RelAlgExecutor::executeWorkUnit().

2526  {
2527  CHECK(!table_infos.empty());
2528  const auto& first_table = table_infos.front();
2529  size_t max_num_groups = first_table.info.getNumTuplesUpperBound();
2530  for (const auto& table_info : table_infos) {
2531  if (table_info.info.getNumTuplesUpperBound() > max_num_groups) {
2532  max_num_groups = table_info.info.getNumTuplesUpperBound();
2533  }
2534  }
2535  return std::max(max_num_groups, size_t(1));
2536 }
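For example, if the input tables have tuple-count upper bounds of 10, 1,000,000 and 500, the estimate is 1,000,000; the result is clamped to at least 1 via std::max with size_t(1).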

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const std::string &  columnName,
const SQLTypeInfo &  columnType,
const Analyzer::Constant *  col_cv,
const Catalog_Namespace::Catalog &  catalog 
)

Definition at line 1955 of file RelAlgExecutor.cpp.

References CHECK(), logger::ERROR, SQLTypeInfo::get_comp_param(), Analyzer::Constant::get_constval(), Analyzer::Constant::get_is_null(), Catalog_Namespace::Catalog::getMetadataForDict(), inline_fixed_encoding_null_val(), LOG, and Datum::stringval.

Referenced by RelAlgExecutor::executeSimpleInsert(), and insert_one_dict_str().

1959  {
1960  if (col_cv->get_is_null()) {
1961  *col_data = inline_fixed_encoding_null_val(columnType);
1962  } else {
1963  const int dict_id = columnType.get_comp_param();
1964  const auto col_datum = col_cv->get_constval();
1965  const auto& str = *col_datum.stringval;
1966  const auto dd = catalog.getMetadataForDict(dict_id);
1967  CHECK(dd && dd->stringDict);
1968  int32_t str_id = dd->stringDict->getOrAdd(str);
1969  if (!dd->dictIsTemp) {
1970  const auto checkpoint_ok = dd->stringDict->checkpoint();
1971  if (!checkpoint_ok) {
1972  throw std::runtime_error("Failed to checkpoint dictionary for column " +
1973  columnName);
1974  }
1975  }
1976  const bool invalid = str_id > max_valid_int_value<T>();
1977  if (invalid || str_id == inline_int_null_value<int32_t>()) {
1978  if (invalid) {
1979  LOG(ERROR) << "Could not encode string: " << str
1980  << ", the encoded value doesn't fit in " << sizeof(T) * 8
1981  << " bits. Will store NULL instead.";
1982  }
1983  str_id = inline_fixed_encoding_null_val(columnType);
1984  }
1985  *col_data = str_id;
1986  }
1987  return *col_data;
1988 }
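A minimal standalone sketch of the same encode-or-null pattern, using a plain std::unordered_map in place of the StringDictionary and std::numeric_limits<T>::min() as a stand-in null sentinel (the real code uses inline_fixed_encoding_null_val()); this is only an illustration, not the OmniSciDB API:

#include <cstdint>
#include <iostream>
#include <limits>
#include <string>
#include <unordered_map>

// Toy dictionary encoder: assign (or reuse) an integer id for a string and
// store it in a fixed-width column slot, falling back to a null sentinel when
// the id does not fit into T.
template <class T>
int64_t encode_dict_str(T* col_data,
                        std::unordered_map<std::string, int32_t>& dict,
                        const std::string& str) {
  const auto it = dict.emplace(str, static_cast<int32_t>(dict.size())).first;
  int32_t str_id = it->second;
  if (str_id > std::numeric_limits<T>::max()) {
    std::cerr << "Could not encode string: " << str << ", storing NULL instead\n";
    str_id = std::numeric_limits<T>::min();  // stand-in for the encoding's null value
  }
  *col_data = static_cast<T>(str_id);
  return *col_data;
}

int main() {
  std::unordered_map<std::string, int32_t> dict;
  int8_t col = 0;
  encode_dict_str(&col, dict, "foo");          // id 0
  encode_dict_str(&col, dict, "bar");          // id 1
  std::cout << static_cast<int>(col) << '\n';  // prints 1
}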

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const ColumnDescriptor *  cd,
const Analyzer::Constant *  col_cv,
const Catalog_Namespace::Catalog &  catalog 
)

Definition at line 1991 of file RelAlgExecutor.cpp.

References ColumnDescriptor::columnName, ColumnDescriptor::columnType, and insert_one_dict_str().

1994  {
1995  return insert_one_dict_str(col_data, cd->columnName, cd->columnType, col_cv, catalog);
1996 }

bool anonymous_namespace{RelAlgExecutor.cpp}::is_agg ( const Analyzer::Expr *  expr)

Definition at line 1301 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_aggtype(), kAVG, kMAX, kMIN, and kSUM.

Referenced by anonymous_namespace{RelAlgDagBuilder.cpp}::create_compound(), RelAlgExecutor::executeWorkUnit(), get_logical_type_for_expr(), and ResultSet::getSingleSlotTargetBitmap().

1301  {
1302  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1303  if (agg_expr && agg_expr->get_contains_agg()) {
1304  auto agg_type = agg_expr->get_aggtype();
1305  if (agg_type == SQLAgg::kMIN || agg_type == SQLAgg::kMAX ||
1306  agg_type == SQLAgg::kSUM || agg_type == SQLAgg::kAVG) {
1307  return true;
1308  }
1309  }
1310  return false;
1311 }
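Only MIN, MAX, SUM and AVG qualify: for example, a COUNT aggregate (or any non-aggregate expression) makes this predicate return false.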

bool anonymous_namespace{RelAlgExecutor.cpp}::is_count_distinct ( const Analyzer::Expr *  expr)

Definition at line 1296 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_is_distinct().

Referenced by get_logical_type_for_expr().

1296  {
1297  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1298  return agg_expr && agg_expr->get_is_distinct();
1299 }

bool anonymous_namespace{RelAlgExecutor.cpp}::is_window_execution_unit ( const RelAlgExecutionUnit &  ra_exe_unit)

Definition at line 1585 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1585  {
1586  return std::any_of(ra_exe_unit.target_exprs.begin(),
1587  ra_exe_unit.target_exprs.end(),
1588  [](const Analyzer::Expr* expr) {
1589  return dynamic_cast<const Analyzer::WindowFunction*>(expr);
1590  });
1591 }

std::vector<JoinType> anonymous_namespace{RelAlgExecutor.cpp}::left_deep_join_types ( const RelLeftDeepInnerJoin *  left_deep_join)

Definition at line 3188 of file RelAlgExecutor.cpp.

References CHECK_GE, RelLeftDeepInnerJoin::getOuterCondition(), INNER, RelAlgNode::inputCount(), and LEFT.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::translateLeftDeepJoinFilter().

3188  {
3189  CHECK_GE(left_deep_join->inputCount(), size_t(2));
3190  std::vector<JoinType> join_types(left_deep_join->inputCount() - 1, JoinType::INNER);
3191  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
3192  ++nesting_level) {
3193  if (left_deep_join->getOuterCondition(nesting_level)) {
3194  join_types[nesting_level - 1] = JoinType::LEFT;
3195  }
3196  }
3197  return join_types;
3198 }
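For example, a four-way left-deep join (inputCount() == 4) in which only nesting level 2 carries an outer condition yields the join-type vector {INNER, LEFT, INNER}.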

template<class QualsList >
bool anonymous_namespace{RelAlgExecutor.cpp}::list_contains_expression ( const QualsList &  haystack,
const std::shared_ptr< Analyzer::Expr > &  needle 
)

Definition at line 3367 of file RelAlgExecutor.cpp.

Referenced by reverse_logical_distribution().

3368  {
3369  for (const auto& qual : haystack) {
3370  if (*qual == *needle) {
3371  return true;
3372  }
3373  }
3374  return false;
3375 }


bool anonymous_namespace{RelAlgExecutor.cpp}::node_is_aggregate ( const RelAlgNode *  ra)

Definition at line 53 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

53  {
54  const auto compound = dynamic_cast<const RelCompound*>(ra);
55  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
56  return ((compound && compound->isAggregate()) || aggregate);
57 }


std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::reverse_logical_distribution ( const std::shared_ptr< Analyzer::Expr > &  expr)

Definition at line 3380 of file RelAlgExecutor.cpp.

References build_logical_expression(), CHECK_GE, kAND, kONE, kOR, list_contains_expression(), Parser::OperExpr::normalize(), qual_to_conjunctive_form(), and qual_to_disjunctive_form().

Referenced by RelAlgExecutor::makeJoinQuals().

3381  {
3382  const auto expr_terms = qual_to_disjunctive_form(expr);
3383  CHECK_GE(expr_terms.size(), size_t(1));
3384  const auto& first_term = expr_terms.front();
3385  const auto first_term_factors = qual_to_conjunctive_form(first_term);
3386  std::vector<std::shared_ptr<Analyzer::Expr>> common_factors;
3387  // First, collect the conjunctive components common to all the disjunctive components.
3388  // Don't do it for simple qualifiers, we only care about expensive or join qualifiers.
3389  for (const auto& first_term_factor : first_term_factors.quals) {
3390  bool is_common =
3391  expr_terms.size() > 1; // Only report common factors for disjunction.
3392  for (size_t i = 1; i < expr_terms.size(); ++i) {
3393  const auto crt_term_factors = qual_to_conjunctive_form(expr_terms[i]);
3394  if (!list_contains_expression(crt_term_factors.quals, first_term_factor)) {
3395  is_common = false;
3396  break;
3397  }
3398  }
3399  if (is_common) {
3400  common_factors.push_back(first_term_factor);
3401  }
3402  }
3403  if (common_factors.empty()) {
3404  return expr;
3405  }
3406  // Now that the common expressions are known, collect the remaining expressions.
3407  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_terms;
3408  for (const auto& term : expr_terms) {
3409  const auto term_cf = qual_to_conjunctive_form(term);
3410  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_quals(
3411  term_cf.simple_quals.begin(), term_cf.simple_quals.end());
3412  for (const auto& qual : term_cf.quals) {
3413  if (!list_contains_expression(common_factors, qual)) {
3414  remaining_quals.push_back(qual);
3415  }
3416  }
3417  if (!remaining_quals.empty()) {
3418  remaining_terms.push_back(build_logical_expression(remaining_quals, kAND));
3419  }
3420  }
3421  // Reconstruct the expression with the transformation applied.
3422  const auto common_expr = build_logical_expression(common_factors, kAND);
3423  if (remaining_terms.empty()) {
3424  return common_expr;
3425  }
3426  const auto remaining_expr = build_logical_expression(remaining_terms, kOR);
3427  return Parser::OperExpr::normalize(kAND, kONE, common_expr, remaining_expr);
3428 }
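In effect this is the distributive law applied in reverse: (A AND B) OR (A AND C) becomes A AND (B OR C), and an expression whose disjuncts share no common conjunct is returned unchanged. The toy sketch below illustrates the same factor-out-the-common-conjuncts idea on sets of strings instead of Analyzer::Expr; it is only an illustration, not the executor's implementation:

#include <algorithm>
#include <iostream>
#include <iterator>
#include <set>
#include <string>
#include <utility>
#include <vector>

using Conjunction = std::set<std::string>;  // one disjunct, as a set of conjuncts

// Split a disjunction of conjunctions into the conjuncts common to every
// disjunct and the per-disjunct remainders (assumes at least one disjunct).
std::pair<Conjunction, std::vector<Conjunction>> factor_common(
    const std::vector<Conjunction>& disjuncts) {
  Conjunction common = disjuncts.front();
  for (const auto& d : disjuncts) {
    Conjunction next;
    std::set_intersection(common.begin(), common.end(), d.begin(), d.end(),
                          std::inserter(next, next.end()));
    common = std::move(next);
  }
  std::vector<Conjunction> remainder;
  for (const auto& d : disjuncts) {
    Conjunction rest;
    std::set_difference(d.begin(), d.end(), common.begin(), common.end(),
                        std::inserter(rest, rest.end()));
    remainder.push_back(std::move(rest));
  }
  return {common, remainder};
}

int main() {
  // (A AND B) OR (A AND C)  ==>  common factor {A}, remainders {B}, {C}
  const auto [common, rest] = factor_common({{"A", "B"}, {"A", "C"}});
  for (const auto& f : common) {
    std::cout << f << '\n';  // prints: A
  }
}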

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::rewrite_quals ( const std::list< std::shared_ptr< Analyzer::Expr >> &  quals)

Definition at line 3243 of file RelAlgExecutor.cpp.

References rewrite_expr().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

3244  {
3245  std::list<std::shared_ptr<Analyzer::Expr>> rewritten_quals;
3246  for (const auto& qual : quals) {
3247  const auto rewritten_qual = rewrite_expr(qual.get());
3248  rewritten_quals.push_back(rewritten_qual ? rewritten_qual : qual);
3249  }
3250  return rewritten_quals;
3251 }

std::vector<const RexScalar*> anonymous_namespace{RelAlgExecutor.cpp}::rex_to_conjunctive_form ( const RexScalar *  qual_expr)

Definition at line 3340 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3340  {
3341  CHECK(qual_expr);
3342  const auto bin_oper = dynamic_cast<const RexOperator*>(qual_expr);
3343  if (!bin_oper || bin_oper->getOperator() != kAND) {
3344  return {qual_expr};
3345  }
3346  CHECK_GE(bin_oper->size(), size_t(2));
3347  auto lhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(0));
3348  for (size_t i = 1; i < bin_oper->size(); ++i) {
3349  const auto rhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(i));
3350  lhs_cf.insert(lhs_cf.end(), rhs_cf.begin(), rhs_cf.end());
3351  }
3352  return lhs_cf;
3353 }
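For example, the nested expression AND(AND(a, b), c) flattens into the list {a, b, c}, while any qualifier whose top-level operator is not AND is returned as a single-element list.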

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelCompound *  compound 
)

Definition at line 1084 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSource().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1084  {
1085  return compound->getScalarSource(i);
1086 }

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelProject *  project 
)

Definition at line 1088 of file RelAlgExecutor.cpp.

References RelProject::getProjectAt().

1088  {
1089  return project->getProjectAt(i);
1090 }

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelTableFunction *  table_func 
)

Definition at line 1092 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputAt().

1092  {
1093  return table_func->getTableFuncInputAt(i);
1094 }

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict ( const std::shared_ptr< Analyzer::Expr >  expr)

Definition at line 1096 of file RelAlgExecutor.cpp.

References kENCODING_DICT, kENCODING_NONE, and TRANSIENT_DICT_ID.

Referenced by set_transient_dict_maybe(), translate_groupby_exprs(), and translate_targets().

1097  {
1098  const auto& ti = expr->get_type_info();
1099  if (!ti.is_string() || ti.get_compression() != kENCODING_NONE) {
1100  return expr;
1101  }
1102  auto transient_dict_ti = ti;
1103  transient_dict_ti.set_compression(kENCODING_DICT);
1104  transient_dict_ti.set_comp_param(TRANSIENT_DICT_ID);
1105  transient_dict_ti.set_fixed_size();
1106  return expr->add_cast(transient_dict_ti);
1107 }
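A none-encoded string expression is thus cast to a dictionary-encoded type whose comp param is the transient dictionary id (TRANSIENT_DICT_ID); non-string expressions and strings that already carry a dictionary encoding are returned unchanged.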

void anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict_maybe ( std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::shared_ptr< Analyzer::Expr > &  expr 
)

Definition at line 1109 of file RelAlgExecutor.cpp.

References fold_expr(), and set_transient_dict().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1111  {
1112  try {
1113  scalar_sources.push_back(set_transient_dict(fold_expr(expr.get())));
1114  } catch (...) {
1115  scalar_sources.push_back(fold_expr(expr.get()));
1116  }
1117 }

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::synthesize_inputs ( const RelAlgNode *  ra_node,
const size_t  nest_level,
const std::vector< TargetMetaInfo > &  in_metainfo,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3501 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, CHECK_LE, RelAlgNode::getInput(), RelAlgNode::inputCount(), and table_id_from_ra().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and get_inputs_meta().

3505  {
3506  CHECK_LE(size_t(1), ra_node->inputCount());
3507  CHECK_GE(size_t(2), ra_node->inputCount());
3508  const auto input = ra_node->getInput(nest_level);
3509  const auto it_rte_idx = input_to_nest_level.find(input);
3510  CHECK(it_rte_idx != input_to_nest_level.end());
3511  const int rte_idx = it_rte_idx->second;
3512  const int table_id = table_id_from_ra(input);
3513  std::vector<std::shared_ptr<Analyzer::Expr>> inputs;
3514  const auto scan_ra = dynamic_cast<const RelScan*>(input);
3515  int input_idx = 0;
3516  for (const auto& input_meta : in_metainfo) {
3517  inputs.push_back(
3518  std::make_shared<Analyzer::ColumnVar>(input_meta.get_type_info(),
3519  table_id,
3520  scan_ra ? input_idx + 1 : input_idx,
3521  rte_idx));
3522  ++input_idx;
3523  }
3524  return inputs;
3525 }
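Note the column index is offset by one for RelScan inputs (scan_ra ? input_idx + 1 : input_idx), presumably because physical table columns are numbered from 1 whereas the columns of intermediate results are numbered from 0.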

int anonymous_namespace{RelAlgExecutor.cpp}::table_id_from_ra ( const RelAlgNode *  ra_node)

Definition at line 896 of file RelAlgExecutor.cpp.

References CHECK(), RelAlgNode::getId(), and RelScan::getTableDescriptor().

Referenced by collect_used_input_desc(), get_input_desc_impl(), and synthesize_inputs().

896  {
897  const auto scan_ra = dynamic_cast<const RelScan*>(ra_node);
898  if (scan_ra) {
899  const auto td = scan_ra->getTableDescriptor();
900  CHECK(td);
901  return td->tableId;
902  }
903  return -ra_node->getId();
904 }

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::target_exprs_for_union ( RelAlgNode const *  input_node)

Definition at line 3655 of file RelAlgExecutor.cpp.

References RelAlgNode::getId(), RelAlgNode::getOutputMetainfo(), shared::printContainer(), and VLOG.

Referenced by RelAlgExecutor::createUnionWorkUnit().

3656  {
3657  std::vector<TargetMetaInfo> const& tmis = input_node->getOutputMetainfo();
3658  VLOG(3) << "input_node->getOutputMetainfo()=" << shared::printContainer(tmis);
3659  const int negative_node_id = -input_node->getId();
3660  std::vector<std::shared_ptr<Analyzer::Expr>> target_exprs;
3661  target_exprs.reserve(tmis.size());
3662  for (size_t i = 0; i < tmis.size(); ++i) {
3663  target_exprs.push_back(std::make_shared<Analyzer::ColumnVar>(
3664  tmis[i].get_type_info(), negative_node_id, i, 0));
3665  }
3666  return target_exprs;
3667 }

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::transform_to_inner ( const Analyzer::Expr *  expr)

Definition at line 1675 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::computeWindow().

1675  {
1676  const auto tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr);
1677  if (tuple) {
1678  std::vector<std::shared_ptr<Analyzer::Expr>> transformed_tuple;
1679  for (const auto& element : tuple->getTuple()) {
1680  transformed_tuple.push_back(transform_to_inner(element.get()));
1681  }
1682  return makeExpr<Analyzer::ExpressionTuple>(transformed_tuple);
1683  }
1684  const auto col = dynamic_cast<const Analyzer::ColumnVar*>(expr);
1685  if (!col) {
1686  throw std::runtime_error("Only columns supported in the window partition for now");
1687  }
1688  return makeExpr<Analyzer::ColumnVar>(
1689  col->get_type_info(), col->get_table_id(), col->get_column_id(), 1);
1690 }

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelCompound *  compound,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1192 of file RelAlgExecutor.cpp.

References RelCompound::getGroupByCount(), RelCompound::isAggregate(), and set_transient_dict().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1194  {
1195  if (!compound->isAggregate()) {
1196  return {nullptr};
1197  }
1198  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1199  for (size_t group_idx = 0; group_idx < compound->getGroupByCount(); ++group_idx) {
1200  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1201  }
1202  return groupby_exprs;
1203 }

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelAggregate *  aggregate,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1205 of file RelAlgExecutor.cpp.

References RelAggregate::getGroupByCount(), and set_transient_dict().

1207  {
1208  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1209  for (size_t group_idx = 0; group_idx < aggregate->getGroupByCount(); ++group_idx) {
1210  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1211  }
1212  return groupby_exprs;
1213 }

QualsConjunctiveForm anonymous_namespace{RelAlgExecutor.cpp}::translate_quals ( const RelCompound *  compound,
const RelAlgTranslator &  translator 
)

Definition at line 1215 of file RelAlgExecutor.cpp.

References fold_expr(), RelCompound::getFilterExpr(), qual_to_conjunctive_form(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

1216  {
1217  const auto filter_rex = compound->getFilterExpr();
1218  const auto filter_expr =
1219  filter_rex ? translator.translateScalarRex(filter_rex) : nullptr;
1220  return filter_expr ? qual_to_conjunctive_form(fold_expr(filter_expr.get()))
1221                      : QualsConjunctiveForm{};
1222 }

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources ( const RA *  ra_node,
const RelAlgTranslator &  translator,
const ::ExecutorType  executor_type 
)

Definition at line 1129 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), fold_expr(), get_scalar_sources_size(), Native, rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), RelAlgTranslator::translateScalarRex(), and VLOG.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::createTableFunctionWorkUnit().

1132  {
1133  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1134  const size_t scalar_sources_size = get_scalar_sources_size(ra_node);
1135  VLOG(3) << "get_scalar_sources_size(" << ra_node->toString()
1136  << ") = " << scalar_sources_size;
1137  for (size_t i = 0; i < scalar_sources_size; ++i) {
1138  const auto scalar_rex = scalar_at(i, ra_node);
1139  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1140  // RexRef are synthetic scalars we append at the end of the real ones
1141  // for the sake of taking memory ownership, no real work needed here.
1142  continue;
1143  }
1144 
1145  const auto scalar_expr =
1146  rewrite_array_elements(translator.translateScalarRex(scalar_rex).get());
1147  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1148  if (executor_type == ExecutorType::Native) {
1149  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1150  } else {
1151  scalar_sources.push_back(cast_dict_to_none(fold_expr(rewritten_expr.get())));
1152  }
1153  }
1154 
1155  return scalar_sources;
1156 }

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources_for_update ( const RA *  ra_node,
const RelAlgTranslator &  translator,
int32_t  tableId,
const Catalog_Namespace::Catalog &  cat,
const ColumnNameList &  colNames,
size_t  starting_projection_column_idx 
)

Definition at line 1159 of file RelAlgExecutor.cpp.

References cat(), get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

1165  {
1166  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1167  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
1168  const auto scalar_rex = scalar_at(i, ra_node);
1169  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1170  // RexRef are synthetic scalars we append at the end of the real ones
1171  // for the sake of taking memory ownership, no real work needed here.
1172  continue;
1173  }
1174 
1175  std::shared_ptr<Analyzer::Expr> translated_expr;
1176  if (i >= starting_projection_column_idx && i < get_scalar_sources_size(ra_node) - 1) {
1177  translated_expr = cast_to_column_type(translator.translateScalarRex(scalar_rex),
1178  tableId,
1179  cat,
1180  colNames[i - starting_projection_column_idx]);
1181  } else {
1182  translated_expr = translator.translateScalarRex(scalar_rex);
1183  }
1184  const auto scalar_expr = rewrite_array_elements(translated_expr.get());
1185  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1186  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1187  }
1188 
1189  return scalar_sources;
1190 }

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound *  compound,
const RelAlgTranslator &  translator,
const ExecutorType  executor_type 
)

Definition at line 1224 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), CHECK(), CHECK_GE, CHECK_LE, fold_expr(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, Native, rewrite_expr(), set_transient_dict(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1230  {
1231  std::vector<Analyzer::Expr*> target_exprs;
1232  for (size_t i = 0; i < compound->size(); ++i) {
1233  const auto target_rex = compound->getTargetExpr(i);
1234  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
1235  std::shared_ptr<Analyzer::Expr> target_expr;
1236  if (target_rex_agg) {
1237  target_expr =
1238  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
1239  } else {
1240  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
1241  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
1242  if (target_rex_ref) {
1243  const auto ref_idx = target_rex_ref->getIndex();
1244  CHECK_GE(ref_idx, size_t(1));
1245  CHECK_LE(ref_idx, groupby_exprs.size());
1246  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
1247  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
1248  } else {
1249  target_expr = translator.translateScalarRex(target_rex_scalar);
1250  auto rewritten_expr = rewrite_expr(target_expr.get());
1251  target_expr = fold_expr(rewritten_expr.get());
1252  if (executor_type == ExecutorType::Native) {
1253  try {
1254  target_expr = set_transient_dict(target_expr);
1255  } catch (...) {
1256  // noop
1257  }
1258  } else {
1259  target_expr = cast_dict_to_none(target_expr);
1260  }
1261  }
1262  }
1263  CHECK(target_expr);
1264  target_exprs_owned.push_back(target_expr);
1265  target_exprs.push_back(target_expr.get());
1266  }
1267  return target_exprs;
1268 }

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelAggregate *  aggregate,
const RelAlgTranslator &  translator 
)

Definition at line 1270 of file RelAlgExecutor.cpp.

References CHECK(), fold_expr(), RelAggregate::getAggExprs(), Analyzer::Var::kGROUPBY, RelAlgTranslator::translateAggregateRex(), and var_ref().

1275  {
1276  std::vector<Analyzer::Expr*> target_exprs;
1277  size_t group_key_idx = 1;
1278  for (const auto& groupby_expr : groupby_exprs) {
1279  auto target_expr =
1280  var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, group_key_idx++);
1281  target_exprs_owned.push_back(target_expr);
1282  target_exprs.push_back(target_expr.get());
1283  }
1284 
1285  for (const auto& target_rex_agg : aggregate->getAggExprs()) {
1286  auto target_expr =
1287  RelAlgTranslator::translateAggregateRex(target_rex_agg.get(), scalar_sources);
1288  CHECK(target_expr);
1289  target_expr = fold_expr(target_expr.get());
1290  target_exprs_owned.push_back(target_expr);
1291  target_exprs.push_back(target_expr.get());
1292  }
1293  return target_exprs;
1294 }