OmniSciDB  bf83d84833
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
anonymous_namespace{RelAlgExecutor.cpp} Namespace Reference

Classes

struct  ExecutorMutexHolder
 
class  RexUsedInputsVisitor
 

Functions

bool node_is_aggregate (const RelAlgNode *ra)
 
std::unordered_set< PhysicalInput > get_physical_inputs (const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
 
bool is_metadata_placeholder (const ChunkMetadata &metadata)
 
void prepare_foreign_table_for_execution (const RelAlgNode &ra_node, int database_id)
 
void check_sort_node_source_constraint (const RelSort *sort)
 
const RelAlgNode * get_data_sink (const RelAlgNode *ra_node)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelAggregate *aggregate, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelProject *project, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelTableFunction *table_func, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelFilter *filter, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelLogicalUnion *logical_union, const Catalog_Namespace::Catalog &)
 
int table_id_from_ra (const RelAlgNode *ra_node)
 
std::unordered_map< const
RelAlgNode *, int > 
get_input_nest_levels (const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_join_source_used_inputs (const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
 
void collect_used_input_desc (std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
template<class RA >
std::pair< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor > > > 
get_input_desc_impl (const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
template<class RA >
std::tuple< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor >
>, std::vector
< std::shared_ptr< RexInput > > > 
get_input_desc (const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
size_t get_scalar_sources_size (const RelCompound *compound)
 
size_t get_scalar_sources_size (const RelProject *project)
 
size_t get_scalar_sources_size (const RelTableFunction *table_func)
 
const RexScalar * scalar_at (const size_t i, const RelCompound *compound)
 
const RexScalar * scalar_at (const size_t i, const RelProject *project)
 
const RexScalar * scalar_at (const size_t i, const RelTableFunction *table_func)
 
std::shared_ptr< Analyzer::Expr > set_transient_dict (const std::shared_ptr< Analyzer::Expr > expr)
 
void set_transient_dict_maybe (std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
 
std::shared_ptr< Analyzer::Expr > cast_dict_to_none (const std::shared_ptr< Analyzer::Expr > &input)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources (const RA *ra_node, const RelAlgTranslator &translator, const ::ExecutorType executor_type)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources_for_update (const RA *ra_node, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelCompound *compound, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelAggregate *aggregate, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
QualsConjunctiveForm translate_quals (const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator, const ExecutorType executor_type)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelAggregate *aggregate, const RelAlgTranslator &translator)
 
bool is_count_distinct (const Analyzer::Expr *expr)
 
bool is_agg (const Analyzer::Expr *expr)
 
SQLTypeInfo get_logical_type_for_expr (const Analyzer::Expr &expr)
 
template<class RA >
std::vector< TargetMetaInfo > get_targets_meta (const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
 
template<>
std::vector< TargetMetaInfo > get_targets_meta (const RelFilter *filter, const std::vector< Analyzer::Expr * > &target_exprs)
 
bool is_window_execution_unit (const RelAlgExecutionUnit &ra_exe_unit)
 
std::shared_ptr< Analyzer::Expr > transform_to_inner (const Analyzer::Expr *expr)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
template<class T >
int64_t insert_one_dict_str (T *col_data, const ColumnDescriptor *cd, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
 
std::list< Analyzer::OrderEntry > get_order_entries (const RelSort *sort)
 
size_t get_scan_limit (const RelAlgNode *ra, const size_t limit)
 
bool first_oe_is_desc (const std::list< Analyzer::OrderEntry > &order_entries)
 
size_t groups_approx_upper_bound (const std::vector< InputTableInfo > &table_infos)
 
bool compute_output_buffer_size (const RelAlgExecutionUnit &ra_exe_unit)
 
bool exe_unit_has_quals (const RelAlgExecutionUnit ra_exe_unit)
 
RelAlgExecutionUnit decide_approx_count_distinct_implementation (const RelAlgExecutionUnit &ra_exe_unit_in, const std::vector< InputTableInfo > &table_infos, const Executor *executor, const ExecutorDeviceType device_type_in, std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned)
 
void build_render_targets (RenderInfo &render_info, const std::vector< Analyzer::Expr * > &work_unit_target_exprs, const std::vector< TargetMetaInfo > &targets_meta)
 
bool can_use_bump_allocator (const RelAlgExecutionUnit &ra_exe_unit, const CompilationOptions &co, const ExecutionOptions &eo)
 
JoinType get_join_type (const RelAlgNode *ra)
 
std::unique_ptr< const RexOperator > get_bitwise_equals (const RexScalar *scalar)
 
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction (const RexScalar *scalar)
 
std::vector< JoinType > left_deep_join_types (const RelLeftDeepInnerJoin *left_deep_join)
 
template<class RA >
std::vector< size_t > do_table_reordering (std::vector< InputDescriptor > &input_descs, std::list< std::shared_ptr< const InputColDescriptor >> &input_col_descs, const JoinQualsPerNestingLevel &left_deep_join_quals, std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const RA *node, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::vector< size_t > get_left_deep_join_input_sizes (const RelLeftDeepInnerJoin *left_deep_join)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
rewrite_quals (const std::list< std::shared_ptr< Analyzer::Expr >> &quals)
 
std::vector< const RexScalar * > rex_to_conjunctive_form (const RexScalar *qual_expr)
 
std::shared_ptr< Analyzer::Expr > build_logical_expression (const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
 
template<class QualsList >
bool list_contains_expression (const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
 
std::shared_ptr< Analyzer::Expr > reverse_logical_distribution (const std::shared_ptr< Analyzer::Expr > &expr)
 
std::vector< std::shared_ptr
< Analyzer::Expr > > 
synthesize_inputs (const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
std::vector< std::shared_ptr
< Analyzer::Expr > > 
target_exprs_for_union (RelAlgNode const *input_node)
 
std::pair< std::vector
< TargetMetaInfo >
, std::vector< std::shared_ptr
< Analyzer::Expr > > > 
get_inputs_meta (const RelFilter *filter, const RelAlgTranslator &translator, const std::vector< std::shared_ptr< RexInput >> &inputs_owned, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 

Function Documentation

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::build_logical_expression ( const std::vector< std::shared_ptr< Analyzer::Expr >> &  factors,
const SQLOps  sql_op 
)

Definition at line 3429 of file RelAlgExecutor.cpp.

References CHECK, kONE, and Parser::OperExpr::normalize().

Referenced by reverse_logical_distribution().

3431  {
3432  CHECK(!factors.empty());
3433  auto acc = factors.front();
3434  for (size_t i = 1; i < factors.size(); ++i) {
3435  acc = Parser::OperExpr::normalize(sql_op, kONE, acc, factors[i]);
3436  }
3437  return acc;
3438 }
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:276
Definition: sqldefs.h:69
#define CHECK(condition)
Definition: Logger.h:197

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::build_render_targets ( RenderInfo render_info,
const std::vector< Analyzer::Expr * > &  work_unit_target_exprs,
const std::vector< TargetMetaInfo > &  targets_meta 
)

Definition at line 2705 of file RelAlgExecutor.cpp.

References CHECK_EQ, and RenderInfo::targets.

Referenced by RelAlgExecutor::executeWorkUnit().

2707  {
2708  CHECK_EQ(work_unit_target_exprs.size(), targets_meta.size());
2709  render_info.targets.clear();
2710  for (size_t i = 0; i < targets_meta.size(); ++i) {
2711  render_info.targets.emplace_back(std::make_shared<Analyzer::TargetEntry>(
2712  targets_meta[i].get_resname(),
2713  work_unit_target_exprs[i]->get_shared_ptr(),
2714  false));
2715  }
2716 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::vector< std::shared_ptr< Analyzer::TargetEntry > > targets
Definition: RenderInfo.h:37

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::can_use_bump_allocator ( const RelAlgExecutionUnit ra_exe_unit,
const CompilationOptions co,
const ExecutionOptions eo 
)
inline

Definition at line 2718 of file RelAlgExecutor.cpp.

References CompilationOptions::device_type, g_enable_bump_allocator, GPU, SortInfo::order_entries, ExecutionOptions::output_columnar_hint, and RelAlgExecutionUnit::sort_info.

Referenced by RelAlgExecutor::executeWorkUnit().

2720  {
2721  return g_enable_bump_allocator && co.device_type == ExecutorDeviceType::GPU &&
2722  !eo.output_columnar_hint && ra_exe_unit.sort_info.order_entries.empty();
2723 }
const std::list< Analyzer::OrderEntry > order_entries
const SortInfo sort_info
ExecutorDeviceType device_type
bool g_enable_bump_allocator
Definition: Execute.cpp:106

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::cast_dict_to_none ( const std::shared_ptr< Analyzer::Expr > &  input)

Definition at line 1209 of file RelAlgExecutor.cpp.

References kENCODING_DICT, and kTEXT.

Referenced by translate_scalar_sources(), and translate_targets().

1210  {
1211  const auto& input_ti = input->get_type_info();
1212  if (input_ti.is_string() && input_ti.get_compression() == kENCODING_DICT) {
1213  return input->add_cast(SQLTypeInfo(kTEXT, input_ti.get_notnull()));
1214  }
1215  return input;
1216 }
Definition: sqltypes.h:51

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::check_sort_node_source_constraint ( const RelSort sort)
inline

Definition at line 467 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

467  {
468  CHECK_EQ(size_t(1), sort->inputCount());
469  const auto source = sort->getInput(0);
470  if (dynamic_cast<const RelSort*>(source)) {
471  throw std::runtime_error("Sort node not supported as input to another sort");
472  }
473 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::collect_used_input_desc ( std::vector< InputDescriptor > &  input_descs,
const Catalog_Namespace::Catalog cat,
std::unordered_set< std::shared_ptr< const InputColDescriptor >> &  input_col_descs_unique,
const RelAlgNode ra_node,
const std::unordered_set< const RexInput * > &  source_used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 1057 of file RelAlgExecutor.cpp.

References Catalog_Namespace::Catalog::getColumnIdBySpi(), table_id_from_ra(), RelAlgNode::toString(), and VLOG.

Referenced by get_input_desc_impl().

1063  {
1064  VLOG(3) << "ra_node=" << ra_node->toString()
1065  << " input_col_descs_unique.size()=" << input_col_descs_unique.size()
1066  << " source_used_inputs.size()=" << source_used_inputs.size();
1067  for (const auto used_input : source_used_inputs) {
1068  const auto input_ra = used_input->getSourceNode();
1069  const int table_id = table_id_from_ra(input_ra);
1070  const auto col_id = used_input->getIndex();
1071  auto it = input_to_nest_level.find(input_ra);
1072  if (it != input_to_nest_level.end()) {
1073  const int input_desc = it->second;
1074  input_col_descs_unique.insert(std::make_shared<const InputColDescriptor>(
1075  dynamic_cast<const RelScan*>(input_ra)
1076  ? cat.getColumnIdBySpi(table_id, col_id + 1)
1077  : col_id,
1078  table_id,
1079  input_desc));
1080  } else if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
1081  throw std::runtime_error("Bushy joins not supported");
1082  }
1083  }
1084 }
int table_id_from_ra(const RelAlgNode *ra_node)
virtual std::string toString() const =0
#define VLOG(n)
Definition: Logger.h:291
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1538

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::compute_output_buffer_size ( const RelAlgExecutionUnit ra_exe_unit)

Determines whether a query needs to compute the size of its output buffer. Returns true for projection queries with no LIMIT or a LIMIT that exceeds the high scan limit threshold (meaning it would be cheaper to compute the number of rows passing or use the bump allocator than allocate the current scan limit per GPU)

Definition at line 2617 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::groupby_exprs, Executor::high_scan_limit, RelAlgExecutionUnit::scan_limit, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

2617  {
2618  for (const auto target_expr : ra_exe_unit.target_exprs) {
2619  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
2620  return false;
2621  }
2622  }
2623  if (ra_exe_unit.groupby_exprs.size() == 1 && !ra_exe_unit.groupby_exprs.front() &&
2624  (!ra_exe_unit.scan_limit || ra_exe_unit.scan_limit > Executor::high_scan_limit)) {
2625  return true;
2626  }
2627  return false;
2628 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
static const size_t high_scan_limit
Definition: Execute.h:461

+ Here is the caller graph for this function:

RelAlgExecutionUnit anonymous_namespace{RelAlgExecutor.cpp}::decide_approx_count_distinct_implementation ( const RelAlgExecutionUnit ra_exe_unit_in,
const std::vector< InputTableInfo > &  table_infos,
const Executor executor,
const ExecutorDeviceType  device_type_in,
std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned 
)

Definition at line 2635 of file RelAlgExecutor.cpp.

References Bitmap, CHECK, CHECK_GE, g_bigint_count, g_cluster, g_hll_precision_bits, get_agg_type(), get_count_distinct_sub_bitmap_count(), get_target_info(), getExpressionRange(), GPU, hll_size_for_rate(), Integer, kAPPROX_COUNT_DISTINCT, kCOUNT, kENCODING_DICT, kINT, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit(), and RelAlgExecutor::handleOutOfMemoryRetry().

2640  {
2641  RelAlgExecutionUnit ra_exe_unit = ra_exe_unit_in;
2642  for (size_t i = 0; i < ra_exe_unit.target_exprs.size(); ++i) {
2643  const auto target_expr = ra_exe_unit.target_exprs[i];
2644  const auto agg_info = get_target_info(target_expr, g_bigint_count);
2645  if (agg_info.agg_kind != kAPPROX_COUNT_DISTINCT) {
2646  continue;
2647  }
2648  CHECK(dynamic_cast<const Analyzer::AggExpr*>(target_expr));
2649  const auto arg = static_cast<Analyzer::AggExpr*>(target_expr)->get_own_arg();
2650  CHECK(arg);
2651  const auto& arg_ti = arg->get_type_info();
2652  // Avoid calling getExpressionRange for variable length types (string and array),
2653  // it'd trigger an assertion since that API expects to be called only for types
2654  // for which the notion of range is well-defined. A bit of a kludge, but the
2655  // logic to reject these types anyway is at lower levels in the stack and not
2656  // really worth pulling into a separate function for now.
2657  if (!(arg_ti.is_number() || arg_ti.is_boolean() || arg_ti.is_time() ||
2658  (arg_ti.is_string() && arg_ti.get_compression() == kENCODING_DICT))) {
2659  continue;
2660  }
2661  const auto arg_range = getExpressionRange(arg.get(), table_infos, executor);
2662  if (arg_range.getType() != ExpressionRangeType::Integer) {
2663  continue;
2664  }
2665  // When running distributed, the threshold for using the precise implementation
2666  // must be consistent across all leaves, otherwise we could have a mix of precise
2667  // and approximate bitmaps and we cannot aggregate them.
2668  const auto device_type = g_cluster ? ExecutorDeviceType::GPU : device_type_in;
2669  const auto bitmap_sz_bits = arg_range.getIntMax() - arg_range.getIntMin() + 1;
2670  const auto sub_bitmap_count =
2671  get_count_distinct_sub_bitmap_count(bitmap_sz_bits, ra_exe_unit, device_type);
2672  int64_t approx_bitmap_sz_bits{0};
2673  const auto error_rate =
2674  static_cast<Analyzer::AggExpr*>(target_expr)->get_error_rate();
2675  if (error_rate) {
2676  CHECK(error_rate->get_type_info().get_type() == kINT);
2677  CHECK_GE(error_rate->get_constval().intval, 1);
2678  approx_bitmap_sz_bits = hll_size_for_rate(error_rate->get_constval().intval);
2679  } else {
2680  approx_bitmap_sz_bits = g_hll_precision_bits;
2681  }
2682  CountDistinctDescriptor approx_count_distinct_desc{CountDistinctImplType::Bitmap,
2683  arg_range.getIntMin(),
2684  approx_bitmap_sz_bits,
2685  true,
2686  device_type,
2687  sub_bitmap_count};
2688  CountDistinctDescriptor precise_count_distinct_desc{CountDistinctImplType::Bitmap,
2689  arg_range.getIntMin(),
2690  bitmap_sz_bits,
2691  false,
2692  device_type,
2693  sub_bitmap_count};
2694  if (approx_count_distinct_desc.bitmapPaddedSizeBytes() >=
2695  precise_count_distinct_desc.bitmapPaddedSizeBytes()) {
2696  auto precise_count_distinct = makeExpr<Analyzer::AggExpr>(
2697  get_agg_type(kCOUNT, arg.get()), kCOUNT, arg, true, nullptr);
2698  target_exprs_owned.push_back(precise_count_distinct);
2699  ra_exe_unit.target_exprs[i] = precise_count_distinct.get();
2700  }
2701  }
2702  return ra_exe_unit;
2703 }
std::vector< Analyzer::Expr * > target_exprs
int hll_size_for_rate(const int err_percent)
Definition: HyperLogLog.h:115
TargetInfo get_target_info(const PointerType target_expr, const bool bigint_count)
Definition: TargetInfo.h:79
#define CHECK_GE(x, y)
Definition: Logger.h:210
SQLTypeInfo get_agg_type(const SQLAgg agg_kind, const Analyzer::Expr *arg_expr)
int g_hll_precision_bits
size_t get_count_distinct_sub_bitmap_count(const size_t bitmap_sz_bits, const RelAlgExecutionUnit &ra_exe_unit, const ExecutorDeviceType device_type)
bool g_bigint_count
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
Definition: sqldefs.h:76
#define CHECK(condition)
Definition: Logger.h:197
bool g_cluster
Definition: sqltypes.h:44

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::do_table_reordering ( std::vector< InputDescriptor > &  input_descs,
std::list< std::shared_ptr< const InputColDescriptor >> &  input_col_descs,
const JoinQualsPerNestingLevel left_deep_join_quals,
std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const RA *  node,
const std::vector< InputTableInfo > &  query_infos,
const Executor executor 
)

Definition at line 3275 of file RelAlgExecutor.cpp.

References cat(), CHECK, g_cluster, get_input_desc(), get_input_nest_levels(), get_node_input_permutation(), and table_is_replicated().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3282  {
3283  if (g_cluster) {
3284  // Disable table reordering in distributed mode. The aggregator does not have enough
3285  // information to break ties
3286  return {};
3287  }
3288  const auto& cat = *executor->getCatalog();
3289  for (const auto& table_info : query_infos) {
3290  if (table_info.table_id < 0) {
3291  continue;
3292  }
3293  const auto td = cat.getMetadataForTable(table_info.table_id);
3294  CHECK(td);
3295  if (table_is_replicated(td)) {
3296  return {};
3297  }
3298  }
3299  const auto input_permutation =
3300  get_node_input_permutation(left_deep_join_quals, query_infos, executor);
3301  input_to_nest_level = get_input_nest_levels(node, input_permutation);
3302  std::tie(input_descs, input_col_descs, std::ignore) =
3303  get_input_desc(node, input_to_nest_level, input_permutation, cat);
3304  return input_permutation;
3305 }
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels(const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
std::string cat(Ts &&...args)
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
bool table_is_replicated(const TableDescriptor *td)
#define CHECK(condition)
Definition: Logger.h:197
bool g_cluster
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc(const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::exe_unit_has_quals ( const RelAlgExecutionUnit  ra_exe_unit)
inline

Definition at line 2630 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::join_quals, RelAlgExecutionUnit::quals, and RelAlgExecutionUnit::simple_quals.

Referenced by RelAlgExecutor::executeWorkUnit().

2630  {
2631  return !(ra_exe_unit.quals.empty() && ra_exe_unit.join_quals.empty() &&
2632  ra_exe_unit.simple_quals.empty());
2633 }
const JoinQualsPerNestingLevel join_quals
std::list< std::shared_ptr< Analyzer::Expr > > quals
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::first_oe_is_desc ( const std::list< Analyzer::OrderEntry > &  order_entries)

Definition at line 2402 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2402  {
2403  return !order_entries.empty() && order_entries.front().is_desc;
2404 }

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals ( const RexScalar scalar)

Definition at line 3194 of file RelAlgExecutor.cpp.

References CHECK_EQ, kAND, kBW_EQ, kEQ, kISNULL, kOR, and RexVisitorBase< T >::visit().

Referenced by get_bitwise_equals_conjunction().

3194  {
3195  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3196  if (!condition || condition->getOperator() != kOR || condition->size() != 2) {
3197  return nullptr;
3198  }
3199  const auto equi_join_condition =
3200  dynamic_cast<const RexOperator*>(condition->getOperand(0));
3201  if (!equi_join_condition || equi_join_condition->getOperator() != kEQ) {
3202  return nullptr;
3203  }
3204  const auto both_are_null_condition =
3205  dynamic_cast<const RexOperator*>(condition->getOperand(1));
3206  if (!both_are_null_condition || both_are_null_condition->getOperator() != kAND ||
3207  both_are_null_condition->size() != 2) {
3208  return nullptr;
3209  }
3210  const auto lhs_is_null =
3211  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(0));
3212  const auto rhs_is_null =
3213  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(1));
3214  if (!lhs_is_null || !rhs_is_null || lhs_is_null->getOperator() != kISNULL ||
3215  rhs_is_null->getOperator() != kISNULL) {
3216  return nullptr;
3217  }
3218  CHECK_EQ(size_t(1), lhs_is_null->size());
3219  CHECK_EQ(size_t(1), rhs_is_null->size());
3220  CHECK_EQ(size_t(2), equi_join_condition->size());
3221  const auto eq_lhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(0));
3222  const auto eq_rhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(1));
3223  const auto is_null_lhs = dynamic_cast<const RexInput*>(lhs_is_null->getOperand(0));
3224  const auto is_null_rhs = dynamic_cast<const RexInput*>(rhs_is_null->getOperand(0));
3225  if (!eq_lhs || !eq_rhs || !is_null_lhs || !is_null_rhs) {
3226  return nullptr;
3227  }
3228  std::vector<std::unique_ptr<const RexScalar>> eq_operands;
3229  if (*eq_lhs == *is_null_lhs && *eq_rhs == *is_null_rhs) {
3230  RexDeepCopyVisitor deep_copy_visitor;
3231  auto lhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(0));
3232  auto rhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(1));
3233  eq_operands.emplace_back(lhs_op_copy.release());
3234  eq_operands.emplace_back(rhs_op_copy.release());
3235  return boost::make_unique<const RexOperator>(
3236  kBW_EQ, eq_operands, equi_join_condition->getType());
3237  }
3238  return nullptr;
3239 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
Definition: sqldefs.h:38
Definition: sqldefs.h:30
virtual T visit(const RexScalar *rex_scalar) const
Definition: RexVisitor.h:27
Definition: sqldefs.h:37
Definition: sqldefs.h:31

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals_conjunction ( const RexScalar scalar)

Definition at line 3241 of file RelAlgExecutor.cpp.

References CHECK_GE, get_bitwise_equals(), and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3242  {
3243  const auto condition = dynamic_cast<const RexOperator*>(scalar);
3244  if (condition && condition->getOperator() == kAND) {
3245  CHECK_GE(condition->size(), size_t(2));
3246  auto acc = get_bitwise_equals(condition->getOperand(0));
3247  if (!acc) {
3248  return nullptr;
3249  }
3250  for (size_t i = 1; i < condition->size(); ++i) {
3251  std::vector<std::unique_ptr<const RexScalar>> and_operands;
3252  and_operands.emplace_back(std::move(acc));
3253  and_operands.emplace_back(get_bitwise_equals_conjunction(condition->getOperand(i)));
3254  acc =
3255  boost::make_unique<const RexOperator>(kAND, and_operands, condition->getType());
3256  }
3257  return acc;
3258  }
3259  return get_bitwise_equals(scalar);
3260 }
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction(const RexScalar *scalar)
#define CHECK_GE(x, y)
Definition: Logger.h:210
Definition: sqldefs.h:37
std::unique_ptr< const RexOperator > get_bitwise_equals(const RexScalar *scalar)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RelAlgNode* anonymous_namespace{RelAlgExecutor.cpp}::get_data_sink ( const RelAlgNode ra_node)

Definition at line 857 of file RelAlgExecutor.cpp.

References CHECK_EQ, RelAlgNode::getInput(), RelAlgNode::inputCount(), and join().

Referenced by get_input_desc_impl(), get_input_nest_levels(), get_inputs_meta(), get_join_source_used_inputs(), get_join_type(), and get_used_inputs().

857  {
858  if (auto table_func = dynamic_cast<const RelTableFunction*>(ra_node)) {
859  return table_func;
860  }
861  if (auto join = dynamic_cast<const RelJoin*>(ra_node)) {
862  CHECK_EQ(size_t(2), join->inputCount());
863  return join;
864  }
865  if (!dynamic_cast<const RelLogicalUnion*>(ra_node)) {
866  CHECK_EQ(size_t(1), ra_node->inputCount());
867  }
868  auto only_src = ra_node->getInput(0);
869  const bool is_join = dynamic_cast<const RelJoin*>(only_src) ||
870  dynamic_cast<const RelLeftDeepInnerJoin*>(only_src);
871  return is_join ? only_src : ra_node;
872 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::tuple<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> >, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc ( const RA *  ra_node,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 1148 of file RelAlgExecutor.cpp.

References get_input_desc_impl(), get_used_inputs(), and VLOG.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

1151  {
1152  std::unordered_set<const RexInput*> used_inputs;
1153  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
1154  std::tie(used_inputs, used_inputs_owned) = get_used_inputs(ra_node, cat);
1155  VLOG(3) << "used_inputs.size() = " << used_inputs.size();
1156  auto input_desc_pair = get_input_desc_impl(
1157  ra_node, used_inputs, input_to_nest_level, input_permutation, cat);
1158  return std::make_tuple(
1159  input_desc_pair.first, input_desc_pair.second, used_inputs_owned);
1160 }
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl(const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs(const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
#define VLOG(n)
Definition: Logger.h:291

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::pair<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc_impl ( const RA *  ra_node,
const std::unordered_set< const RexInput * > &  used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 1089 of file RelAlgExecutor.cpp.

References collect_used_input_desc(), get_data_sink(), get_join_source_used_inputs(), InputDescriptor::getNestLevel(), gpu_enabled::sort(), and table_id_from_ra().

Referenced by get_input_desc().

1093  {
1094  std::vector<InputDescriptor> input_descs;
1095  const auto data_sink_node = get_data_sink(ra_node);
1096  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
1097  const auto input_node_idx =
1098  input_permutation.empty() ? input_idx : input_permutation[input_idx];
1099  auto input_ra = data_sink_node->getInput(input_node_idx);
1100  const int table_id = table_id_from_ra(input_ra);
1101  input_descs.emplace_back(table_id, input_idx);
1102  }
1103  std::sort(input_descs.begin(),
1104  input_descs.end(),
1105  [](const InputDescriptor& lhs, const InputDescriptor& rhs) {
1106  return lhs.getNestLevel() < rhs.getNestLevel();
1107  });
1108  std::unordered_set<std::shared_ptr<const InputColDescriptor>> input_col_descs_unique;
1109  collect_used_input_desc(input_descs,
1110  cat,
1111  input_col_descs_unique, // modified
1112  ra_node,
1113  used_inputs,
1114  input_to_nest_level);
1115  std::unordered_set<const RexInput*> join_source_used_inputs;
1116  std::vector<std::shared_ptr<RexInput>> join_source_used_inputs_owned;
1117  std::tie(join_source_used_inputs, join_source_used_inputs_owned) =
1118  get_join_source_used_inputs(ra_node, cat);
1119  collect_used_input_desc(input_descs,
1120  cat,
1121  input_col_descs_unique, // modified
1122  ra_node,
1123  join_source_used_inputs,
1124  input_to_nest_level);
1125  std::vector<std::shared_ptr<const InputColDescriptor>> input_col_descs(
1126  input_col_descs_unique.begin(), input_col_descs_unique.end());
1127 
1128  std::sort(input_col_descs.begin(),
1129  input_col_descs.end(),
1130  [](std::shared_ptr<const InputColDescriptor> const& lhs,
1131  std::shared_ptr<const InputColDescriptor> const& rhs) {
1132  return std::make_tuple(lhs->getScanDesc().getNestLevel(),
1133  lhs->getColId(),
1134  lhs->getScanDesc().getTableId()) <
1135  std::make_tuple(rhs->getScanDesc().getNestLevel(),
1136  rhs->getColId(),
1137  rhs->getScanDesc().getTableId());
1138  });
1139  return {input_descs,
1140  std::list<std::shared_ptr<const InputColDescriptor>>(input_col_descs.begin(),
1141  input_col_descs.end())};
1142 }
void collect_used_input_desc(std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
DEVICE void sort(ARGS &&...args)
Definition: gpu_enabled.h:105
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs(const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
int getNestLevel() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_map<const RelAlgNode*, int> anonymous_namespace{RelAlgExecutor.cpp}::get_input_nest_levels ( const RelAlgNode ra_node,
const std::vector< size_t > &  input_permutation 
)

Definition at line 994 of file RelAlgExecutor.cpp.

References CHECK, get_data_sink(), logger::INFO, and LOG_IF.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), and do_table_reordering().

996  {
997  const auto data_sink_node = get_data_sink(ra_node);
998  std::unordered_map<const RelAlgNode*, int> input_to_nest_level;
999  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
1000  const auto input_node_idx =
1001  input_permutation.empty() ? input_idx : input_permutation[input_idx];
1002  const auto input_ra = data_sink_node->getInput(input_node_idx);
 1003  // Having a non-zero mapped value (input_idx) results in the query being interpreted
1004  // as a JOIN within CodeGenerator::codegenColVar() due to rte_idx being set to the
1005  // mapped value (input_idx) which originates here. This would be incorrect for UNION.
1006  size_t const idx = dynamic_cast<const RelLogicalUnion*>(ra_node) ? 0 : input_idx;
1007  const auto it_ok = input_to_nest_level.emplace(input_ra, idx);
1008  CHECK(it_ok.second);
1009  LOG_IF(INFO, !input_permutation.empty())
1010  << "Assigned input " << input_ra->toString() << " to nest level " << input_idx;
1011  }
1012  return input_to_nest_level;
1013 }
#define LOG_IF(severity, condition)
Definition: Logger.h:287
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::vector<TargetMetaInfo>, std::vector<std::shared_ptr<Analyzer::Expr> > > anonymous_namespace{RelAlgExecutor.cpp}::get_inputs_meta ( const RelFilter filter,
const RelAlgTranslator translator,
const std::vector< std::shared_ptr< RexInput >> &  inputs_owned,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3937 of file RelAlgExecutor.cpp.

References CHECK, get_data_sink(), get_exprs_not_owned(), get_targets_meta(), synthesize_inputs(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createFilterWorkUnit().

3940  {
3941  std::vector<TargetMetaInfo> in_metainfo;
3942  std::vector<std::shared_ptr<Analyzer::Expr>> exprs_owned;
3943  const auto data_sink_node = get_data_sink(filter);
3944  auto input_it = inputs_owned.begin();
3945  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
3946  const auto source = data_sink_node->getInput(nest_level);
3947  const auto scan_source = dynamic_cast<const RelScan*>(source);
3948  if (scan_source) {
3949  CHECK(source->getOutputMetainfo().empty());
3950  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources_owned;
3951  for (size_t i = 0; i < scan_source->size(); ++i, ++input_it) {
3952  scalar_sources_owned.push_back(translator.translateScalarRex(input_it->get()));
3953  }
3954  const auto source_metadata =
3955  get_targets_meta(scan_source, get_exprs_not_owned(scalar_sources_owned));
3956  in_metainfo.insert(
3957  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3958  exprs_owned.insert(
3959  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3960  } else {
3961  const auto& source_metadata = source->getOutputMetainfo();
3962  input_it += source_metadata.size();
3963  in_metainfo.insert(
3964  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3965  const auto scalar_sources_owned = synthesize_inputs(
3966  data_sink_node, nest_level, source_metadata, input_to_nest_level);
3967  exprs_owned.insert(
3968  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3969  }
3970  }
3971  return std::make_pair(in_metainfo, exprs_owned);
3972 }
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs(const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
std::vector< Analyzer::Expr * > get_exprs_not_owned(const std::vector< std::shared_ptr< Analyzer::Expr >> &exprs)
Definition: Execute.h:259
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_join_source_used_inputs ( const RelAlgNode ra_node,
const Catalog_Namespace::Catalog cat 
)

Definition at line 1016 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, CHECK_GT, get_data_sink(), anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelAlgNode::inputCount(), join(), run_benchmark_import::result, RelAlgNode::toString(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc_impl().

1017  {
1018  const auto data_sink_node = get_data_sink(ra_node);
1019  if (auto join = dynamic_cast<const RelJoin*>(data_sink_node)) {
1020  CHECK_EQ(join->inputCount(), 2u);
1021  const auto condition = join->getCondition();
1022  RexUsedInputsVisitor visitor(cat);
1023  auto condition_inputs = visitor.visit(condition);
1024  std::vector<std::shared_ptr<RexInput>> condition_inputs_owned(
1025  visitor.get_inputs_owned());
1026  return std::make_pair(condition_inputs, condition_inputs_owned);
1027  }
1028 
1029  if (auto left_deep_join = dynamic_cast<const RelLeftDeepInnerJoin*>(data_sink_node)) {
1030  CHECK_GE(left_deep_join->inputCount(), 2u);
1031  const auto condition = left_deep_join->getInnerCondition();
1032  RexUsedInputsVisitor visitor(cat);
1033  auto result = visitor.visit(condition);
1034  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
1035  ++nesting_level) {
1036  const auto outer_condition = left_deep_join->getOuterCondition(nesting_level);
1037  if (outer_condition) {
1038  const auto outer_result = visitor.visit(outer_condition);
1039  result.insert(outer_result.begin(), outer_result.end());
1040  }
1041  }
1042  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
1043  return std::make_pair(result, used_inputs_owned);
1044  }
1045 
1046  if (dynamic_cast<const RelLogicalUnion*>(ra_node)) {
1047  CHECK_GT(ra_node->inputCount(), 1u) << ra_node->toString();
1048  } else if (dynamic_cast<const RelTableFunction*>(ra_node)) {
1049  CHECK_GT(ra_node->inputCount(), 0u) << ra_node->toString();
1050  } else {
1051  CHECK_EQ(ra_node->inputCount(), 1u) << ra_node->toString();
1052  }
1053  return std::make_pair(std::unordered_set<const RexInput*>{},
1054  std::vector<std::shared_ptr<RexInput>>{});
1055 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
std::string join(T const &container, std::string const &delim)
#define CHECK_GE(x, y)
Definition: Logger.h:210
#define CHECK_GT(x, y)
Definition: Logger.h:209
virtual std::string toString() const =0
const size_t inputCount() const
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

JoinType anonymous_namespace{RelAlgExecutor.cpp}::get_join_type ( const RelAlgNode ra)

Definition at line 3182 of file RelAlgExecutor.cpp.

References get_data_sink(), INNER, INVALID, and join().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3182  {
3183  auto sink = get_data_sink(ra);
3184  if (auto join = dynamic_cast<const RelJoin*>(sink)) {
3185  return join->getJoinType();
3186  }
3187  if (dynamic_cast<const RelLeftDeepInnerJoin*>(sink)) {
3188  return JoinType::INNER;
3189  }
3190 
3191  return JoinType::INVALID;
3192 }
std::string join(T const &container, std::string const &delim)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::get_left_deep_join_input_sizes ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 3307 of file RelAlgExecutor.cpp.

References get_node_output(), RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

3308  {
3309  std::vector<size_t> input_sizes;
3310  for (size_t i = 0; i < left_deep_join->inputCount(); ++i) {
3311  const auto inputs = get_node_output(left_deep_join->getInput(i));
3312  input_sizes.push_back(inputs.size());
3313  }
3314  return input_sizes;
3315 }
const RelAlgNode * getInput(const size_t idx) const
RANodeOutput get_node_output(const RelAlgNode *ra_node)
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

SQLTypeInfo anonymous_namespace{RelAlgExecutor.cpp}::get_logical_type_for_expr ( const Analyzer::Expr expr)
inline

Definition at line 1403 of file RelAlgExecutor.cpp.

References get_logical_type_info(), get_nullable_logical_type_info(), Analyzer::Expr::get_type_info(), is_agg(), is_count_distinct(), and kBIGINT.

Referenced by get_targets_meta().

1403  {
1404  if (is_count_distinct(&expr)) {
1405  return SQLTypeInfo(kBIGINT, false);
 1406  } else if (is_agg(&expr)) {
 1407  return get_nullable_logical_type_info(expr.get_type_info());
 1408  }
1409  return get_logical_type_info(expr.get_type_info());
1410 }
bool is_agg(const Analyzer::Expr *expr)
SQLTypeInfo get_nullable_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:914
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:893
bool is_count_distinct(const Analyzer::Expr *expr)
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:78

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<Analyzer::OrderEntry> anonymous_namespace{RelAlgExecutor.cpp}::get_order_entries ( const RelSort sort)

Definition at line 2382 of file RelAlgExecutor.cpp.

References RelSort::collationCount(), Descending, First, RelSort::getCollation(), and run_benchmark_import::result.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

2382  {
2383  std::list<Analyzer::OrderEntry> result;
2384  for (size_t i = 0; i < sort->collationCount(); ++i) {
2385  const auto sort_field = sort->getCollation(i);
2386  result.emplace_back(sort_field.getField() + 1,
2387  sort_field.getSortDir() == SortDirection::Descending,
2388  sort_field.getNullsPosition() == NullSortedPosition::First);
2389  }
2390  return result;
2391 }
SortField getCollation(const size_t i) const
size_t collationCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_set<PhysicalInput> anonymous_namespace{RelAlgExecutor.cpp}::get_physical_inputs ( const Catalog_Namespace::Catalog cat,
const RelAlgNode ra 
)

Definition at line 61 of file RelAlgExecutor.cpp.

References get_physical_inputs(), and Catalog_Namespace::Catalog::getColumnIdBySpi().

63  {
64  auto phys_inputs = get_physical_inputs(ra);
65  std::unordered_set<PhysicalInput> phys_inputs2;
66  for (auto& phi : phys_inputs) {
67  phys_inputs2.insert(
68  PhysicalInput{cat.getColumnIdBySpi(phi.table_id, phi.col_id), phi.table_id});
69  }
70  return phys_inputs2;
71 }
std::unordered_set< PhysicalInput > get_physical_inputs(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1538

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelCompound compound)

Definition at line 1162 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSourcesSize().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1162  {
1163  return compound->getScalarSourcesSize();
1164 }
const size_t getScalarSourcesSize() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelProject project)

Definition at line 1166 of file RelAlgExecutor.cpp.

References RelProject::size().

1166  {
1167  return project->size();
1168 }
size_t size() const override

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelTableFunction table_func)

Definition at line 1170 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputsSize().

1170  {
1171  return table_func->getTableFuncInputsSize();
1172 }
size_t getTableFuncInputsSize() const

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scan_limit ( const RelAlgNode ra,
const size_t  limit 
)

Definition at line 2393 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit().

2393  {
2394  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
2395  if (aggregate) {
2396  return 0;
2397  }
2398  const auto compound = dynamic_cast<const RelCompound*>(ra);
2399  return (compound && compound->isAggregate()) ? 0 : limit;
2400 }

+ Here is the caller graph for this function:

template<class RA >
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RA *  ra_node,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1413 of file RelAlgExecutor.cpp.

References CHECK, CHECK_EQ, and get_logical_type_for_expr().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), RelAlgExecutor::createUnionWorkUnit(), get_inputs_meta(), and get_targets_meta().

1415  {
1416  std::vector<TargetMetaInfo> targets_meta;
1417  CHECK_EQ(ra_node->size(), target_exprs.size());
1418  for (size_t i = 0; i < ra_node->size(); ++i) {
1419  CHECK(target_exprs[i]);
1420  // TODO(alex): remove the count distinct type fixup.
1421  targets_meta.emplace_back(ra_node->getFieldName(i),
1422  get_logical_type_for_expr(*target_exprs[i]),
1423  target_exprs[i]->get_type_info());
1424  }
1425  return targets_meta;
1426 }
#define CHECK_EQ(x, y)
Definition: Logger.h:205
SQLTypeInfo get_logical_type_for_expr(const Analyzer::Expr &expr)
#define CHECK(condition)
Definition: Logger.h:197

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<>
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RelFilter filter,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1429 of file RelAlgExecutor.cpp.

References get_targets_meta(), RelAlgNode::getInput(), RelAlgNode::toString(), and UNREACHABLE.

1431  {
1432  RelAlgNode const* input0 = filter->getInput(0);
1433  if (auto const* input = dynamic_cast<RelCompound const*>(input0)) {
1434  return get_targets_meta(input, target_exprs);
1435  } else if (auto const* input = dynamic_cast<RelProject const*>(input0)) {
1436  return get_targets_meta(input, target_exprs);
1437  } else if (auto const* input = dynamic_cast<RelLogicalUnion const*>(input0)) {
1438  return get_targets_meta(input, target_exprs);
1439  } else if (auto const* input = dynamic_cast<RelAggregate const*>(input0)) {
1440  return get_targets_meta(input, target_exprs);
1441  } else if (auto const* input = dynamic_cast<RelScan const*>(input0)) {
1442  return get_targets_meta(input, target_exprs);
1443  }
1444  UNREACHABLE() << "Unhandled node type: " << input0->toString();
1445  return {};
1446 }
#define UNREACHABLE()
Definition: Logger.h:241
const RelAlgNode * getInput(const size_t idx) const
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
virtual std::string toString() const =0

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelCompound compound,
const Catalog_Namespace::Catalog cat 
)

Definition at line 875 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelCompound::getFilterExpr(), RelCompound::getScalarSource(), RelCompound::getScalarSourcesSize(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc().

875  {
876  RexUsedInputsVisitor visitor(cat);
877  const auto filter_expr = compound->getFilterExpr();
878  std::unordered_set<const RexInput*> used_inputs =
879  filter_expr ? visitor.visit(filter_expr) : std::unordered_set<const RexInput*>{};
880  const auto sources_size = compound->getScalarSourcesSize();
881  for (size_t i = 0; i < sources_size; ++i) {
882  const auto source_inputs = visitor.visit(compound->getScalarSource(i));
883  used_inputs.insert(source_inputs.begin(), source_inputs.end());
884  }
885  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
886  return std::make_pair(used_inputs, used_inputs_owned);
887 }
const RexScalar * getFilterExpr() const
const size_t getScalarSourcesSize() const
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelAggregate aggregate,
const Catalog_Namespace::Catalog cat 
)

Definition at line 890 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, RelAggregate::getAggExprs(), RelAggregate::getGroupByCount(), RelAlgNode::getInput(), RelAlgNode::getOutputMetainfo(), and RelAlgNode::inputCount().

890  {
891  CHECK_EQ(size_t(1), aggregate->inputCount());
892  std::unordered_set<const RexInput*> used_inputs;
893  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
894  const auto source = aggregate->getInput(0);
895  const auto& in_metainfo = source->getOutputMetainfo();
896  const auto group_count = aggregate->getGroupByCount();
897  CHECK_GE(in_metainfo.size(), group_count);
898  for (size_t i = 0; i < group_count; ++i) {
899  auto synthesized_used_input = new RexInput(source, i);
900  used_inputs_owned.emplace_back(synthesized_used_input);
901  used_inputs.insert(synthesized_used_input);
902  }
903  for (const auto& agg_expr : aggregate->getAggExprs()) {
904  for (size_t i = 0; i < agg_expr->size(); ++i) {
905  const auto operand_idx = agg_expr->getOperand(i);
906  CHECK_GE(in_metainfo.size(), static_cast<size_t>(operand_idx));
907  auto synthesized_used_input = new RexInput(source, operand_idx);
908  used_inputs_owned.emplace_back(synthesized_used_input);
909  used_inputs.insert(synthesized_used_input);
910  }
911  }
912  return std::make_pair(used_inputs, used_inputs_owned);
913 }
const size_t getGroupByCount() const
#define CHECK_EQ(x, y)
Definition: Logger.h:205
#define CHECK_GE(x, y)
Definition: Logger.h:210
const RelAlgNode * getInput(const size_t idx) const
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
const size_t inputCount() const
const std::vector< TargetMetaInfo > & getOutputMetainfo() const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelProject project,
const Catalog_Namespace::Catalog cat 
)

Definition at line 916 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelProject::getProjectAt(), RelProject::size(), and RexVisitorBase< T >::visit().

916  {
917  RexUsedInputsVisitor visitor(cat);
918  std::unordered_set<const RexInput*> used_inputs;
919  for (size_t i = 0; i < project->size(); ++i) {
920  const auto proj_inputs = visitor.visit(project->getProjectAt(i));
921  used_inputs.insert(proj_inputs.begin(), proj_inputs.end());
922  }
923  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
924  return std::make_pair(used_inputs, used_inputs_owned);
925 }
size_t size() const override
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelTableFunction table_func,
const Catalog_Namespace::Catalog cat 
)

Definition at line 928 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelTableFunction::getTableFuncInputAt(), RelTableFunction::getTableFuncInputsSize(), and RexVisitorBase< T >::visit().

929  {
930  RexUsedInputsVisitor visitor(cat);
931  std::unordered_set<const RexInput*> used_inputs;
932  for (size_t i = 0; i < table_func->getTableFuncInputsSize(); ++i) {
933  const auto table_func_inputs = visitor.visit(table_func->getTableFuncInputAt(i));
934  used_inputs.insert(table_func_inputs.begin(), table_func_inputs.end());
935  }
936  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
937  return std::make_pair(used_inputs, used_inputs_owned);
938 }
size_t getTableFuncInputsSize() const
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelFilter filter,
const Catalog_Namespace::Catalog cat 
)

Definition at line 941 of file RelAlgExecutor.cpp.

References CHECK, and get_data_sink().

941  {
942  std::unordered_set<const RexInput*> used_inputs;
943  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
944  const auto data_sink_node = get_data_sink(filter);
945  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
946  const auto source = data_sink_node->getInput(nest_level);
947  const auto scan_source = dynamic_cast<const RelScan*>(source);
948  if (scan_source) {
949  CHECK(source->getOutputMetainfo().empty());
950  for (size_t i = 0; i < scan_source->size(); ++i) {
951  auto synthesized_used_input = new RexInput(scan_source, i);
952  used_inputs_owned.emplace_back(synthesized_used_input);
953  used_inputs.insert(synthesized_used_input);
954  }
955  } else {
956  const auto& partial_in_metadata = source->getOutputMetainfo();
957  for (size_t i = 0; i < partial_in_metadata.size(); ++i) {
958  auto synthesized_used_input = new RexInput(source, i);
959  used_inputs_owned.emplace_back(synthesized_used_input);
960  used_inputs.insert(synthesized_used_input);
961  }
962  }
963  }
964  return std::make_pair(used_inputs, used_inputs_owned);
965 }
#define CHECK(condition)
Definition: Logger.h:197
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelLogicalUnion logical_union,
const Catalog_Namespace::Catalog  
)

Definition at line 968 of file RelAlgExecutor.cpp.

References RelAlgNode::getInput(), RelAlgNode::inputCount(), and VLOG.

968  {
969  std::unordered_set<const RexInput*> used_inputs(logical_union->inputCount());
970  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
971  used_inputs_owned.reserve(logical_union->inputCount());
972  VLOG(3) << "logical_union->inputCount()=" << logical_union->inputCount();
973  auto const n_inputs = logical_union->inputCount();
974  for (size_t nest_level = 0; nest_level < n_inputs; ++nest_level) {
975  auto input = logical_union->getInput(nest_level);
976  for (size_t i = 0; i < input->size(); ++i) {
977  used_inputs_owned.emplace_back(std::make_shared<RexInput>(input, i));
978  used_inputs.insert(used_inputs_owned.back().get());
979  }
980  }
981  return std::make_pair(std::move(used_inputs), std::move(used_inputs_owned));
982 }
const RelAlgNode * getInput(const size_t idx) const
const size_t inputCount() const
#define VLOG(n)
Definition: Logger.h:291

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::groups_approx_upper_bound ( const std::vector< InputTableInfo > &  table_infos)

Upper bound estimation for the number of groups. Not strictly correct and not tight, but if the tables involved are really small we shouldn't waste time doing the NDV estimation. We don't account for cross-joins and/or GROUP BY on unnested arrays, which is the reason this estimation isn't entirely reliable.

Definition at line 2599 of file RelAlgExecutor.cpp.

References CHECK.

Referenced by RelAlgExecutor::executeWorkUnit().

2599  {
2600  CHECK(!table_infos.empty());
2601  const auto& first_table = table_infos.front();
2602  size_t max_num_groups = first_table.info.getNumTuplesUpperBound();
2603  for (const auto& table_info : table_infos) {
2604  if (table_info.info.getNumTuplesUpperBound() > max_num_groups) {
2605  max_num_groups = table_info.info.getNumTuplesUpperBound();
2606  }
2607  }
2608  return std::max(max_num_groups, size_t(1));
2609 }
#define CHECK(condition)
Definition: Logger.h:197

+ Here is the caller graph for this function:

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const std::string &  columnName,
const SQLTypeInfo columnType,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 2017 of file RelAlgExecutor.cpp.

References CHECK, logger::ERROR, SQLTypeInfo::get_comp_param(), Analyzer::Constant::get_constval(), Analyzer::Constant::get_is_null(), Catalog_Namespace::Catalog::getMetadataForDict(), inline_fixed_encoding_null_val(), LOG, Datum::stringval, and omnisci.dtypes::T.

Referenced by RelAlgExecutor::executeSimpleInsert(), and insert_one_dict_str().

2021  {
2022  if (col_cv->get_is_null()) {
2023  *col_data = inline_fixed_encoding_null_val(columnType);
2024  } else {
2025  const int dict_id = columnType.get_comp_param();
2026  const auto col_datum = col_cv->get_constval();
2027  const auto& str = *col_datum.stringval;
2028  const auto dd = catalog.getMetadataForDict(dict_id);
2029  CHECK(dd && dd->stringDict);
2030  int32_t str_id = dd->stringDict->getOrAdd(str);
2031  if (!dd->dictIsTemp) {
2032  const auto checkpoint_ok = dd->stringDict->checkpoint();
2033  if (!checkpoint_ok) {
2034  throw std::runtime_error("Failed to checkpoint dictionary for column " +
2035  columnName);
2036  }
2037  }
2038  const bool invalid = str_id > max_valid_int_value<T>();
2039  if (invalid || str_id == inline_int_null_value<int32_t>()) {
2040  if (invalid) {
2041  LOG(ERROR) << "Could not encode string: " << str
2042  << ", the encoded value doesn't fit in " << sizeof(T) * 8
2043  << " bits. Will store NULL instead.";
2044  }
2045  str_id = inline_fixed_encoding_null_val(columnType);
2046  }
2047  *col_data = str_id;
2048  }
2049  return *col_data;
2050 }
#define LOG(tag)
Definition: Logger.h:188
bool get_is_null() const
Definition: Analyzer.h:334
const DictDescriptor * getMetadataForDict(int dict_ref, bool loadDict=true) const
Definition: Catalog.cpp:1439
std::string * stringval
Definition: sqltypes.h:211
Datum get_constval() const
Definition: Analyzer.h:335
HOST DEVICE int get_comp_param() const
Definition: sqltypes.h:320
#define CHECK(condition)
Definition: Logger.h:197
int64_t inline_fixed_encoding_null_val(const SQL_TYPE_INFO &ti)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class T >
int64_t anonymous_namespace{RelAlgExecutor.cpp}::insert_one_dict_str ( T *  col_data,
const ColumnDescriptor cd,
const Analyzer::Constant col_cv,
const Catalog_Namespace::Catalog catalog 
)

Definition at line 2053 of file RelAlgExecutor.cpp.

References ColumnDescriptor::columnName, ColumnDescriptor::columnType, and insert_one_dict_str().

2056  {
2057  return insert_one_dict_str(col_data, cd->columnName, cd->columnType, col_cv, catalog);
2058 }
int64_t insert_one_dict_str(T *col_data, const std::string &columnName, const SQLTypeInfo &columnType, const Analyzer::Constant *col_cv, const Catalog_Namespace::Catalog &catalog)
SQLTypeInfo columnType
std::string columnName

+ Here is the call graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_agg ( const Analyzer::Expr expr)

Definition at line 1391 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_aggtype(), kAVG, kMAX, kMIN, and kSUM.

Referenced by anonymous_namespace{RelAlgDagBuilder.cpp}::create_compound(), RelAlgExecutor::executeWorkUnit(), get_logical_type_for_expr(), and ResultSet::getSingleSlotTargetBitmap().

1391  {
1392  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1393  if (agg_expr && agg_expr->get_contains_agg()) {
1394  auto agg_type = agg_expr->get_aggtype();
1395  if (agg_type == SQLAgg::kMIN || agg_type == SQLAgg::kMAX ||
1396  agg_type == SQLAgg::kSUM || agg_type == SQLAgg::kAVG) {
1397  return true;
1398  }
1399  }
1400  return false;
1401 }
Definition: sqldefs.h:73
Definition: sqldefs.h:75
SQLAgg get_aggtype() const
Definition: Analyzer.h:1095
Definition: sqldefs.h:74
Definition: sqldefs.h:72

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_count_distinct ( const Analyzer::Expr expr)

Definition at line 1386 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_is_distinct().

Referenced by get_logical_type_for_expr().

1386  {
1387  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1388  return agg_expr && agg_expr->get_is_distinct();
1389 }
bool get_is_distinct() const
Definition: Analyzer.h:1098

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_metadata_placeholder ( const ChunkMetadata metadata)

Definition at line 73 of file RelAlgExecutor.cpp.

References CHECK, ChunkMetadata::chunkStats, Datum::intval, SQLTypeInfo::is_dict_encoded_type(), ChunkStats::max, ChunkStats::min, and ChunkMetadata::sqlType.

Referenced by prepare_foreign_table_for_execution().

73  {
 74  // Only supported type for now
 75  CHECK(metadata.sqlType.is_dict_encoded_type());
 76  return metadata.chunkStats.min.intval > metadata.chunkStats.max.intval;
77 }
int32_t intval
Definition: sqltypes.h:205
ChunkStats chunkStats
Definition: ChunkMetadata.h:35
bool is_dict_encoded_type() const
Definition: sqltypes.h:516
#define CHECK(condition)
Definition: Logger.h:197
SQLTypeInfo sqlType
Definition: ChunkMetadata.h:32

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_window_execution_unit ( const RelAlgExecutionUnit ra_exe_unit)

Definition at line 1676 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1676  {
1677  return std::any_of(ra_exe_unit.target_exprs.begin(),
1678  ra_exe_unit.target_exprs.end(),
1679  [](const Analyzer::Expr* expr) {
1680  return dynamic_cast<const Analyzer::WindowFunction*>(expr);
1681  });
1682 }
std::vector< Analyzer::Expr * > target_exprs

+ Here is the caller graph for this function:

std::vector<JoinType> anonymous_namespace{RelAlgExecutor.cpp}::left_deep_join_types ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 3262 of file RelAlgExecutor.cpp.

References CHECK_GE, RelLeftDeepInnerJoin::getOuterCondition(), INNER, RelAlgNode::inputCount(), and LEFT.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::translateLeftDeepJoinFilter().

3262  {
3263  CHECK_GE(left_deep_join->inputCount(), size_t(2));
3264  std::vector<JoinType> join_types(left_deep_join->inputCount() - 1, JoinType::INNER);
3265  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
3266  ++nesting_level) {
3267  if (left_deep_join->getOuterCondition(nesting_level)) {
3268  join_types[nesting_level - 1] = JoinType::LEFT;
3269  }
3270  }
3271  return join_types;
3272 }
const RexScalar * getOuterCondition(const size_t nesting_level) const
#define CHECK_GE(x, y)
Definition: Logger.h:210
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class QualsList >
bool anonymous_namespace{RelAlgExecutor.cpp}::list_contains_expression ( const QualsList &  haystack,
const std::shared_ptr< Analyzer::Expr > &  needle 
)

Definition at line 3441 of file RelAlgExecutor.cpp.

Referenced by reverse_logical_distribution().

3442  {
3443  for (const auto& qual : haystack) {
3444  if (*qual == *needle) {
3445  return true;
3446  }
3447  }
3448  return false;
3449 }

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::node_is_aggregate ( const RelAlgNode ra)

Definition at line 55 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

55  {
56  const auto compound = dynamic_cast<const RelCompound*>(ra);
57  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
58  return ((compound && compound->isAggregate()) || aggregate);
59 }

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::prepare_foreign_table_for_execution ( const RelAlgNode ra_node,
int  database_id 
)

Definition at line 79 of file RelAlgExecutor.cpp.

References CHECK, Catalog_Namespace::SysCatalog::checkedGetCatalog(), Data_Namespace::CPU_LEVEL, StorageType::FOREIGN_TABLE, get_physical_inputs(), Chunk_NS::Chunk::getChunk(), Catalog_Namespace::SysCatalog::instance(), and is_metadata_placeholder().

Referenced by RelAlgExecutor::executeRelAlgQueryNoRetry(), and RelAlgExecutor::executeRelAlgStep().

79  {
80  // Iterate through ra_node inputs for types that need to be loaded pre-execution
81  // If they do not have valid metadata, load them into CPU memory to generate
82  // the metadata and leave them ready to be used by the query
83  auto catalog = Catalog_Namespace::SysCatalog::instance().checkedGetCatalog(database_id);
84 
85  // provide ForeignStorageMgr with all columns needed for this node
86  std::map<ChunkKey, std::vector<int>> columns_per_table;
87  for (const auto& physical_input : get_physical_inputs(&ra_node)) {
88  int table_id = physical_input.table_id;
89  auto table = catalog->getMetadataForTable(table_id, false);
90  if (table && table->storageType == StorageType::FOREIGN_TABLE) {
91  int col_id = catalog->getColumnIdBySpi(table_id, physical_input.col_id);
92  columns_per_table[{database_id, table_id}].push_back(col_id);
93  }
94  }
95  if (columns_per_table.size() > 0) {
96  CHECK(catalog->getDataMgr().getPersistentStorageMgr()->getForeignStorageMgr() !=
97  nullptr);
98  catalog->getDataMgr()
99  .getPersistentStorageMgr()
100  ->getForeignStorageMgr()
101  ->setColumnHints(columns_per_table);
102  }
103  for (const auto& physical_input : get_physical_inputs(&ra_node)) {
104  int table_id = physical_input.table_id;
105  auto table = catalog->getMetadataForTable(table_id, false);
106  if (table && table->storageType == StorageType::FOREIGN_TABLE) {
107  int col_id = catalog->getColumnIdBySpi(table_id, physical_input.col_id);
108  const auto col_desc = catalog->getMetadataForColumn(table_id, col_id);
109  auto foreign_table = catalog->getForeignTable(table_id);
110  if (col_desc->columnType.is_dict_encoded_type()) {
111  CHECK(foreign_table->fragmenter != nullptr);
112  for (const auto& fragment :
113  foreign_table->fragmenter->getFragmentsForQuery().fragments) {
114  ChunkKey chunk_key = {database_id, table_id, col_id, fragment.fragmentId};
115  const ChunkMetadataMap& metadata_map = fragment.getChunkMetadataMap();
116  CHECK(metadata_map.find(col_id) != metadata_map.end());
117  if (is_metadata_placeholder(*(metadata_map.at(col_id)))) {
118  // When this goes out of scope it will stay in CPU cache but become
119  // evictable
120  std::shared_ptr<Chunk_NS::Chunk> chunk =
121  Chunk_NS::Chunk::getChunk(col_desc,
122  &(catalog->getDataMgr()),
 123  chunk_key,
 124  Data_Namespace::CPU_LEVEL,
 125  0,
126  0,
127  0);
128  }
129  }
130  }
131  }
132  }
133 }
std::vector< int > ChunkKey
Definition: types.h:37
bool is_metadata_placeholder(const ChunkMetadata &metadata)
std::map< int, std::shared_ptr< ChunkMetadata >> ChunkMetadataMap
static SysCatalog & instance()
Definition: SysCatalog.h:288
static std::shared_ptr< Chunk > getChunk(const ColumnDescriptor *cd, DataMgr *data_mgr, const ChunkKey &key, const MemoryLevel mem_level, const int deviceId, const size_t num_bytes, const size_t num_elems)
Definition: Chunk.cpp:28
std::shared_ptr< Catalog > checkedGetCatalog(const int32_t db_id)
#define CHECK(condition)
Definition: Logger.h:197
std::unordered_set< PhysicalInput > get_physical_inputs(const RelAlgNode *ra)
static constexpr char const * FOREIGN_TABLE

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::reverse_logical_distribution ( const std::shared_ptr< Analyzer::Expr > &  expr)

Definition at line 3454 of file RelAlgExecutor.cpp.

References build_logical_expression(), CHECK_GE, kAND, kONE, kOR, list_contains_expression(), Parser::OperExpr::normalize(), qual_to_conjunctive_form(), and qual_to_disjunctive_form().

Referenced by RelAlgExecutor::makeJoinQuals().

3455  {
3456  const auto expr_terms = qual_to_disjunctive_form(expr);
3457  CHECK_GE(expr_terms.size(), size_t(1));
3458  const auto& first_term = expr_terms.front();
3459  const auto first_term_factors = qual_to_conjunctive_form(first_term);
3460  std::vector<std::shared_ptr<Analyzer::Expr>> common_factors;
3461  // First, collect the conjunctive components common to all the disjunctive components.
3462  // Don't do it for simple qualifiers, we only care about expensive or join qualifiers.
3463  for (const auto& first_term_factor : first_term_factors.quals) {
3464  bool is_common =
3465  expr_terms.size() > 1; // Only report common factors for disjunction.
3466  for (size_t i = 1; i < expr_terms.size(); ++i) {
3467  const auto crt_term_factors = qual_to_conjunctive_form(expr_terms[i]);
3468  if (!list_contains_expression(crt_term_factors.quals, first_term_factor)) {
3469  is_common = false;
3470  break;
3471  }
3472  }
3473  if (is_common) {
3474  common_factors.push_back(first_term_factor);
3475  }
3476  }
3477  if (common_factors.empty()) {
3478  return expr;
3479  }
3480  // Now that the common expressions are known, collect the remaining expressions.
3481  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_terms;
3482  for (const auto& term : expr_terms) {
3483  const auto term_cf = qual_to_conjunctive_form(term);
3484  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_quals(
3485  term_cf.simple_quals.begin(), term_cf.simple_quals.end());
3486  for (const auto& qual : term_cf.quals) {
3487  if (!list_contains_expression(common_factors, qual)) {
3488  remaining_quals.push_back(qual);
3489  }
3490  }
3491  if (!remaining_quals.empty()) {
3492  remaining_terms.push_back(build_logical_expression(remaining_quals, kAND));
3493  }
3494  }
3495  // Reconstruct the expression with the transformation applied.
3496  const auto common_expr = build_logical_expression(common_factors, kAND);
3497  if (remaining_terms.empty()) {
3498  return common_expr;
3499  }
3500  const auto remaining_expr = build_logical_expression(remaining_terms, kOR);
3501  return Parser::OperExpr::normalize(kAND, kONE, common_expr, remaining_expr);
3502 }
Definition: sqldefs.h:38
#define CHECK_GE(x, y)
Definition: Logger.h:210
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
bool list_contains_expression(const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
Definition: sqldefs.h:37
std::shared_ptr< Analyzer::Expr > build_logical_expression(const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:276
Definition: sqldefs.h:69
std::vector< std::shared_ptr< Analyzer::Expr > > qual_to_disjunctive_form(const std::shared_ptr< Analyzer::Expr > &qual_expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::rewrite_quals ( const std::list< std::shared_ptr< Analyzer::Expr >> &  quals)

Definition at line 3317 of file RelAlgExecutor.cpp.

References rewrite_expr().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

3318  {
3319  std::list<std::shared_ptr<Analyzer::Expr>> rewritten_quals;
3320  for (const auto& qual : quals) {
3321  const auto rewritten_qual = rewrite_expr(qual.get());
3322  rewritten_quals.push_back(rewritten_qual ? rewritten_qual : qual);
3323  }
3324  return rewritten_quals;
3325 }
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<const RexScalar*> anonymous_namespace{RelAlgExecutor.cpp}::rex_to_conjunctive_form ( const RexScalar qual_expr)

Definition at line 3414 of file RelAlgExecutor.cpp.

References CHECK, CHECK_GE, and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

3414  {
3415  CHECK(qual_expr);
3416  const auto bin_oper = dynamic_cast<const RexOperator*>(qual_expr);
3417  if (!bin_oper || bin_oper->getOperator() != kAND) {
3418  return {qual_expr};
3419  }
3420  CHECK_GE(bin_oper->size(), size_t(2));
3421  auto lhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(0));
3422  for (size_t i = 1; i < bin_oper->size(); ++i) {
3423  const auto rhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(i));
3424  lhs_cf.insert(lhs_cf.end(), rhs_cf.begin(), rhs_cf.end());
3425  }
3426  return lhs_cf;
3427 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::vector< const RexScalar * > rex_to_conjunctive_form(const RexScalar *qual_expr)
Definition: sqldefs.h:37
#define CHECK(condition)
Definition: Logger.h:197

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelCompound compound 
)

Definition at line 1174 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSource().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1174  {
1175  return compound->getScalarSource(i);
1176 }
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelProject project 
)

Definition at line 1178 of file RelAlgExecutor.cpp.

References RelProject::getProjectAt().

1178  {
1179  return project->getProjectAt(i);
1180 }
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelTableFunction table_func 
)

Definition at line 1182 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputAt().

1182  {
1183  return table_func->getTableFuncInputAt(i);
1184 }
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict ( const std::shared_ptr< Analyzer::Expr expr)

Definition at line 1186 of file RelAlgExecutor.cpp.

References kENCODING_DICT, kENCODING_NONE, and TRANSIENT_DICT_ID.

Referenced by set_transient_dict_maybe(), translate_groupby_exprs(), and translate_targets().

1187  {
1188  const auto& ti = expr->get_type_info();
1189  if (!ti.is_string() || ti.get_compression() != kENCODING_NONE) {
1190  return expr;
1191  }
1192  auto transient_dict_ti = ti;
1193  transient_dict_ti.set_compression(kENCODING_DICT);
1194  transient_dict_ti.set_comp_param(TRANSIENT_DICT_ID);
1195  transient_dict_ti.set_fixed_size();
1196  return expr->add_cast(transient_dict_ti);
1197 }
#define TRANSIENT_DICT_ID
Definition: sqltypes.h:250

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict_maybe ( std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::shared_ptr< Analyzer::Expr > &  expr 
)

Definition at line 1199 of file RelAlgExecutor.cpp.

References fold_expr(), and set_transient_dict().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

1201  {
1202  try {
1203  scalar_sources.push_back(set_transient_dict(fold_expr(expr.get())));
1204  } catch (...) {
1205  scalar_sources.push_back(fold_expr(expr.get()));
1206  }
1207 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::synthesize_inputs ( const RelAlgNode ra_node,
const size_t  nest_level,
const std::vector< TargetMetaInfo > &  in_metainfo,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3575 of file RelAlgExecutor.cpp.

References CHECK, CHECK_GE, CHECK_LE, RelAlgNode::getInput(), RelAlgNode::inputCount(), and table_id_from_ra().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and get_inputs_meta().

3579  {
3580  CHECK_LE(size_t(1), ra_node->inputCount());
3581  CHECK_GE(size_t(2), ra_node->inputCount());
3582  const auto input = ra_node->getInput(nest_level);
3583  const auto it_rte_idx = input_to_nest_level.find(input);
3584  CHECK(it_rte_idx != input_to_nest_level.end());
3585  const int rte_idx = it_rte_idx->second;
3586  const int table_id = table_id_from_ra(input);
3587  std::vector<std::shared_ptr<Analyzer::Expr>> inputs;
3588  const auto scan_ra = dynamic_cast<const RelScan*>(input);
3589  int input_idx = 0;
3590  for (const auto& input_meta : in_metainfo) {
3591  inputs.push_back(
3592  std::make_shared<Analyzer::ColumnVar>(input_meta.get_type_info(),
3593  table_id,
3594  scan_ra ? input_idx + 1 : input_idx,
3595  rte_idx));
3596  ++input_idx;
3597  }
3598  return inputs;
3599 }
#define CHECK_GE(x, y)
Definition: Logger.h:210
const RelAlgNode * getInput(const size_t idx) const
#define CHECK_LE(x, y)
Definition: Logger.h:208
int table_id_from_ra(const RelAlgNode *ra_node)
#define CHECK(condition)
Definition: Logger.h:197
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

int anonymous_namespace{RelAlgExecutor.cpp}::table_id_from_ra ( const RelAlgNode ra_node)

Definition at line 984 of file RelAlgExecutor.cpp.

References CHECK, RelAlgNode::getId(), and RelScan::getTableDescriptor().

Referenced by collect_used_input_desc(), get_input_desc_impl(), and synthesize_inputs().

984  {
985  const auto scan_ra = dynamic_cast<const RelScan*>(ra_node);
986  if (scan_ra) {
987  const auto td = scan_ra->getTableDescriptor();
988  CHECK(td);
989  return td->tableId;
990  }
991  return -ra_node->getId();
992 }
unsigned getId() const
#define CHECK(condition)
Definition: Logger.h:197
const TableDescriptor * getTableDescriptor() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::target_exprs_for_union ( RelAlgNode const *  input_node)

Definition at line 3729 of file RelAlgExecutor.cpp.

References RelAlgNode::getId(), RelAlgNode::getOutputMetainfo(), shared::printContainer(), and VLOG.

Referenced by RelAlgExecutor::createUnionWorkUnit().

3730  {
3731  std::vector<TargetMetaInfo> const& tmis = input_node->getOutputMetainfo();
3732  VLOG(3) << "input_node->getOutputMetainfo()=" << shared::printContainer(tmis);
3733  const int negative_node_id = -input_node->getId();
3734  std::vector<std::shared_ptr<Analyzer::Expr>> target_exprs;
3735  target_exprs.reserve(tmis.size());
3736  for (size_t i = 0; i < tmis.size(); ++i) {
3737  target_exprs.push_back(std::make_shared<Analyzer::ColumnVar>(
3738  tmis[i].get_type_info(), negative_node_id, i, 0));
3739  }
3740  return target_exprs;
3741 }
PrintContainer< CONTAINER > printContainer(CONTAINER &container)
Definition: misc.h:64
#define VLOG(n)
Definition: Logger.h:291

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::transform_to_inner ( const Analyzer::Expr expr)

Definition at line 1771 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::computeWindow().

1771  {
1772  const auto tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr);
1773  if (tuple) {
1774  std::vector<std::shared_ptr<Analyzer::Expr>> transformed_tuple;
1775  for (const auto& element : tuple->getTuple()) {
1776  transformed_tuple.push_back(transform_to_inner(element.get()));
1777  }
1778  return makeExpr<Analyzer::ExpressionTuple>(transformed_tuple);
1779  }
1780  const auto col = dynamic_cast<const Analyzer::ColumnVar*>(expr);
1781  if (!col) {
1782  throw std::runtime_error("Only columns supported in the window partition for now");
1783  }
1784  return makeExpr<Analyzer::ColumnVar>(
1785  col->get_type_info(), col->get_table_id(), col->get_column_id(), 1);
1786 }
std::shared_ptr< Analyzer::Expr > transform_to_inner(const Analyzer::Expr *expr)

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelCompound compound,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1282 of file RelAlgExecutor.cpp.

References RelCompound::getGroupByCount(), RelCompound::isAggregate(), and set_transient_dict().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1284  {
1285  if (!compound->isAggregate()) {
1286  return {nullptr};
1287  }
1288  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1289  for (size_t group_idx = 0; group_idx < compound->getGroupByCount(); ++group_idx) {
1290  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1291  }
1292  return groupby_exprs;
1293 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
const size_t getGroupByCount() const
bool isAggregate() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelAggregate aggregate,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 1295 of file RelAlgExecutor.cpp.

References RelAggregate::getGroupByCount(), and set_transient_dict().

1297  {
1298  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
1299  for (size_t group_idx = 0; group_idx < aggregate->getGroupByCount(); ++group_idx) {
1300  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
1301  }
1302  return groupby_exprs;
1303 }
const size_t getGroupByCount() const
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)

+ Here is the call graph for this function:

QualsConjunctiveForm anonymous_namespace{RelAlgExecutor.cpp}::translate_quals ( const RelCompound compound,
const RelAlgTranslator translator 
)

Definition at line 1305 of file RelAlgExecutor.cpp.

References fold_expr(), RelCompound::getFilterExpr(), qual_to_conjunctive_form(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit().

1306  {
1307  const auto filter_rex = compound->getFilterExpr();
1308  const auto filter_expr =
1309  filter_rex ? translator.translateScalarRex(filter_rex) : nullptr;
 1310  return filter_expr ? qual_to_conjunctive_form(fold_expr(filter_expr.get()))
 1311                     : QualsConjunctiveForm{};
 1312 }
const RexScalar * getFilterExpr() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources ( const RA *  ra_node,
const RelAlgTranslator translator,
const ::ExecutorType  executor_type 
)

Definition at line 1219 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), fold_expr(), get_scalar_sources_size(), Native, rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), RelAlgTranslator::translateScalarRex(), and VLOG.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::createTableFunctionWorkUnit().

1222  {
1223  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1224  const size_t scalar_sources_size = get_scalar_sources_size(ra_node);
1225  VLOG(3) << "get_scalar_sources_size(" << ra_node->toString()
1226  << ") = " << scalar_sources_size;
1227  for (size_t i = 0; i < scalar_sources_size; ++i) {
1228  const auto scalar_rex = scalar_at(i, ra_node);
1229  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1230  // RexRef are synthetic scalars we append at the end of the real ones
1231  // for the sake of taking memory ownership, no real work needed here.
1232  continue;
1233  }
1234 
1235  const auto scalar_expr =
1236  rewrite_array_elements(translator.translateScalarRex(scalar_rex).get());
1237  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1238  if (executor_type == ExecutorType::Native) {
1239  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1240  } else {
1241  scalar_sources.push_back(cast_dict_to_none(fold_expr(rewritten_expr.get())));
1242  }
1243  }
1244 
1245  return scalar_sources;
1246 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
#define VLOG(n)
Definition: Logger.h:291
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources_for_update ( const RA *  ra_node,
const RelAlgTranslator translator,
int32_t  tableId,
const Catalog_Namespace::Catalog cat,
const ColumnNameList colNames,
size_t  starting_projection_column_idx 
)

Definition at line 1249 of file RelAlgExecutor.cpp.

References cat(), get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

1255  {
1256  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
1257  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
1258  const auto scalar_rex = scalar_at(i, ra_node);
1259  if (dynamic_cast<const RexRef*>(scalar_rex)) {
1260  // RexRef are synthetic scalars we append at the end of the real ones
1261  // for the sake of taking memory ownership, no real work needed here.
1262  continue;
1263  }
1264 
1265  std::shared_ptr<Analyzer::Expr> translated_expr;
1266  if (i >= starting_projection_column_idx && i < get_scalar_sources_size(ra_node) - 1) {
1267  translated_expr = cast_to_column_type(translator.translateScalarRex(scalar_rex),
1268  tableId,
1269  cat,
1270  colNames[i - starting_projection_column_idx]);
1271  } else {
1272  translated_expr = translator.translateScalarRex(scalar_rex);
1273  }
1274  const auto scalar_expr = rewrite_array_elements(translated_expr.get());
1275  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
1276  set_transient_dict_maybe(scalar_sources, rewritten_expr);
1277  }
1278 
1279  return scalar_sources;
1280 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::string cat(Ts &&...args)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound compound,
const RelAlgTranslator translator,
const ExecutorType  executor_type 
)

Definition at line 1314 of file RelAlgExecutor.cpp.

References cast_dict_to_none(), CHECK, CHECK_GE, CHECK_LE, fold_expr(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, Native, rewrite_expr(), set_transient_dict(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

1320  {
1321  std::vector<Analyzer::Expr*> target_exprs;
1322  for (size_t i = 0; i < compound->size(); ++i) {
1323  const auto target_rex = compound->getTargetExpr(i);
1324  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
1325  std::shared_ptr<Analyzer::Expr> target_expr;
1326  if (target_rex_agg) {
1327  target_expr =
1328  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
1329  } else {
1330  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
1331  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
1332  if (target_rex_ref) {
1333  const auto ref_idx = target_rex_ref->getIndex();
1334  CHECK_GE(ref_idx, size_t(1));
1335  CHECK_LE(ref_idx, groupby_exprs.size());
1336  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
1337  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
1338  } else {
1339  target_expr = translator.translateScalarRex(target_rex_scalar);
1340  auto rewritten_expr = rewrite_expr(target_expr.get());
1341  target_expr = fold_expr(rewritten_expr.get());
1342  if (executor_type == ExecutorType::Native) {
1343  try {
1344  target_expr = set_transient_dict(target_expr);
1345  } catch (...) {
1346  // noop
1347  }
1348  } else {
1349  target_expr = cast_dict_to_none(target_expr);
1350  }
1351  }
1352  }
1353  CHECK(target_expr);
1354  target_exprs_owned.push_back(target_expr);
1355  target_exprs.push_back(target_expr.get());
1356  }
1357  return target_exprs;
1358 }
const Rex * getTargetExpr(const size_t i) const
size_t getIndex() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t size() const override
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1669
#define CHECK_GE(x, y)
Definition: Logger.h:210
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_dict_to_none(const std::shared_ptr< Analyzer::Expr > &input)
#define CHECK_LE(x, y)
Definition: Logger.h:208
#define CHECK(condition)
Definition: Logger.h:197
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelAggregate aggregate,
const RelAlgTranslator translator 
)

Definition at line 1360 of file RelAlgExecutor.cpp.

References CHECK, fold_expr(), RelAggregate::getAggExprs(), Analyzer::Var::kGROUPBY, RelAlgTranslator::translateAggregateRex(), and var_ref().

1365  {
1366  std::vector<Analyzer::Expr*> target_exprs;
1367  size_t group_key_idx = 1;
1368  for (const auto& groupby_expr : groupby_exprs) {
1369  auto target_expr =
1370  var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, group_key_idx++);
1371  target_exprs_owned.push_back(target_expr);
1372  target_exprs.push_back(target_expr.get());
1373  }
1374 
1375  for (const auto& target_rex_agg : aggregate->getAggExprs()) {
1376  auto target_expr =
1377  RelAlgTranslator::translateAggregateRex(target_rex_agg.get(), scalar_sources);
1378  CHECK(target_expr);
1379  target_expr = fold_expr(target_expr.get());
1380  target_exprs_owned.push_back(target_expr);
1381  target_exprs.push_back(target_expr.get());
1382  }
1383  return target_exprs;
1384 }
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1669
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
#define CHECK(condition)
Definition: Logger.h:197
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function: