OmniSciDB  a47db9e897
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
anonymous_namespace{RelAlgExecutor.cpp} Namespace Reference

Classes

class  RexUsedInputsVisitor
 

Functions

bool node_is_aggregate (const RelAlgNode *ra)
 
std::unordered_set< PhysicalInput > get_physical_inputs (const Catalog_Namespace::Catalog &cat, const RelAlgNode *ra)
 
const RelAlgNode * get_data_sink (const RelAlgNode *ra_node)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelCompound *compound, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelAggregate *aggregate, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelProject *project, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelTableFunction *table_func, const Catalog_Namespace::Catalog &cat)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_used_inputs (const RelFilter *filter, const Catalog_Namespace::Catalog &cat)
 
int table_id_from_ra (const RelAlgNode *ra_node)
 
std::unordered_map< const
RelAlgNode *, int > 
get_input_nest_levels (const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
 
std::pair< std::unordered_set
< const RexInput * >
, std::vector< std::shared_ptr
< RexInput > > > 
get_join_source_used_inputs (const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
 
std::vector< const RelAlgNode * > get_non_join_sequence (const RelAlgNode *ra)
 
void collect_used_input_desc (std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
template<class RA >
std::pair< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor > > > 
get_input_desc_impl (const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
template<class RA >
std::tuple< std::vector
< InputDescriptor >, std::list
< std::shared_ptr< const
InputColDescriptor >
>, std::vector
< std::shared_ptr< RexInput > > > 
get_input_desc (const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
 
size_t get_scalar_sources_size (const RelCompound *compound)
 
size_t get_scalar_sources_size (const RelProject *project)
 
size_t get_scalar_sources_size (const RelTableFunction *table_func)
 
const RexScalar * scalar_at (const size_t i, const RelCompound *compound)
 
const RexScalar * scalar_at (const size_t i, const RelProject *project)
 
const RexScalar * scalar_at (const size_t i, const RelTableFunction *table_func)
 
std::shared_ptr< Analyzer::Expr > set_transient_dict (const std::shared_ptr< Analyzer::Expr > expr)
 
void set_transient_dict_maybe (std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources (const RA *ra_node, const RelAlgTranslator &translator)
 
std::shared_ptr< Analyzer::Expr > cast_to_column_type (std::shared_ptr< Analyzer::Expr > expr, int32_t tableId, const Catalog_Namespace::Catalog &cat, const std::string &colName)
 
template<class RA >
std::vector< std::shared_ptr
< Analyzer::Expr > > 
translate_scalar_sources_for_update (const RA *ra_node, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelCompound *compound, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
translate_groupby_exprs (const RelAggregate *aggregate, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)
 
QualsConjunctiveForm translate_quals (const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelAggregate *aggregate, const RelAlgTranslator &translator)
 
std::vector< Analyzer::Expr * > translate_targets_for_update (std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const RelCompound *compound, const RelAlgTranslator &translator, int32_t tableId, const Catalog_Namespace::Catalog &cat, const ColumnNameList &colNames, size_t starting_projection_column_idx)
 
bool is_count_distinct (const Analyzer::Expr *expr)
 
bool is_agg (const Analyzer::Expr *expr)
 
std::vector< TargetMetaInfo > get_modify_manipulated_targets_meta (ModifyManipulationTarget const *manip_node, const std::vector< Analyzer::Expr * > &target_exprs)
 
SQLTypeInfo get_logical_type_for_expr (const Analyzer::Expr &expr)
 
template<class RA >
std::vector< TargetMetaInfo > get_targets_meta (const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
 
bool is_window_execution_unit (const RelAlgExecutionUnit &ra_exe_unit)
 
std::shared_ptr< Analyzer::Expr > transform_to_inner (const Analyzer::Expr *expr)
 
std::list< Analyzer::OrderEntry > get_order_entries (const RelSort *sort)
 
size_t get_scan_limit (const RelAlgNode *ra, const size_t limit)
 
bool first_oe_is_desc (const std::list< Analyzer::OrderEntry > &order_entries)
 
size_t groups_approx_upper_bound (const std::vector< InputTableInfo > &table_infos)
 
bool compute_output_buffer_size (const RelAlgExecutionUnit &ra_exe_unit)
 
bool exe_unit_has_quals (const RelAlgExecutionUnit ra_exe_unit)
 
RelAlgExecutionUnit decide_approx_count_distinct_implementation (const RelAlgExecutionUnit &ra_exe_unit_in, const std::vector< InputTableInfo > &table_infos, const Executor *executor, const ExecutorDeviceType device_type_in, std::vector< std::shared_ptr< Analyzer::Expr >> &target_exprs_owned)
 
void build_render_targets (RenderInfo &render_info, const std::vector< Analyzer::Expr * > &work_unit_target_exprs, const std::vector< TargetMetaInfo > &targets_meta)
 
bool can_use_bump_allocator (const RelAlgExecutionUnit &ra_exe_unit, const CompilationOptions &co, const ExecutionOptions &eo)
 
JoinType get_join_type (const RelAlgNode *ra)
 
std::unique_ptr< const RexOperator > get_bitwise_equals (const RexScalar *scalar)
 
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction (const RexScalar *scalar)
 
std::vector< JoinType > left_deep_join_types (const RelLeftDeepInnerJoin *left_deep_join)
 
template<class RA >
std::vector< size_t > do_table_reordering (std::vector< InputDescriptor > &input_descs, std::list< std::shared_ptr< const InputColDescriptor >> &input_col_descs, const JoinQualsPerNestingLevel &left_deep_join_quals, std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const RA *node, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::vector< size_t > get_left_deep_join_input_sizes (const RelLeftDeepInnerJoin *left_deep_join)
 
std::list< std::shared_ptr
< Analyzer::Expr > > 
rewrite_quals (const std::list< std::shared_ptr< Analyzer::Expr >> &quals)
 
std::vector< const RexScalar * > rex_to_conjunctive_form (const RexScalar *qual_expr)
 
std::shared_ptr< Analyzer::Expr > build_logical_expression (const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
 
template<class QualsList >
bool list_contains_expression (const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
 
std::shared_ptr< Analyzer::Expr > reverse_logical_distribution (const std::shared_ptr< Analyzer::Expr > &expr)
 
std::vector< std::shared_ptr
< Analyzer::Expr > > 
synthesize_inputs (const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 
std::pair< std::vector
< TargetMetaInfo >
, std::vector< std::shared_ptr
< Analyzer::Expr > > > 
get_inputs_meta (const RelFilter *filter, const RelAlgTranslator &translator, const std::vector< std::shared_ptr< RexInput >> &inputs_owned, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
 

Function Documentation

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::build_logical_expression ( const std::vector< std::shared_ptr< Analyzer::Expr >> &  factors,
const SQLOps  sql_op 
)

Definition at line 2702 of file RelAlgExecutor.cpp.

References CHECK(), kONE, and Parser::OperExpr::normalize().

Referenced by reverse_logical_distribution().

2704  {
2705  CHECK(!factors.empty());
2706  auto acc = factors.front();
2707  for (size_t i = 1; i < factors.size(); ++i) {
2708  acc = Parser::OperExpr::normalize(sql_op, kONE, acc, factors[i]);
2709  }
2710  return acc;
2711 }
CHECK(cgen_state)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:261
Definition: sqldefs.h:69

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::build_render_targets ( RenderInfo render_info,
const std::vector< Analyzer::Expr * > &  work_unit_target_exprs,
const std::vector< TargetMetaInfo > &  targets_meta 
)

Definition at line 1907 of file RelAlgExecutor.cpp.

References CHECK_EQ, and RenderInfo::targets.

Referenced by RelAlgExecutor::executeWorkUnit().

1909  {
1910  CHECK_EQ(work_unit_target_exprs.size(), targets_meta.size());
1911  render_info.targets.clear();
1912  for (size_t i = 0; i < targets_meta.size(); ++i) {
1913  render_info.targets.emplace_back(std::make_shared<Analyzer::TargetEntry>(
1914  targets_meta[i].get_resname(),
1915  work_unit_target_exprs[i]->get_shared_ptr(),
1916  false));
1917  }
1918 }
#define CHECK_EQ(x, y)
Definition: Logger.h:198
std::vector< std::shared_ptr< Analyzer::TargetEntry > > targets
Definition: RenderInfo.h:38

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::can_use_bump_allocator ( const RelAlgExecutionUnit ra_exe_unit,
const CompilationOptions co,
const ExecutionOptions eo 
)
inline

Definition at line 1920 of file RelAlgExecutor.cpp.

References CompilationOptions::device_type_, g_enable_bump_allocator, GPU, SortInfo::order_entries, ExecutionOptions::output_columnar_hint, and RelAlgExecutionUnit::sort_info.

Referenced by RelAlgExecutor::executeWorkUnit().

1922  {
1924  !eo.output_columnar_hint && ra_exe_unit.sort_info.order_entries.empty();
1925 }
bool g_enable_bump_allocator
Definition: Execute.cpp:99
const std::list< Analyzer::OrderEntry > order_entries
const SortInfo sort_info
const bool output_columnar_hint
ExecutorDeviceType device_type_

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::cast_to_column_type ( std::shared_ptr< Analyzer::Expr expr,
int32_t  tableId,
const Catalog_Namespace::Catalog cat,
const std::string &  colName 
)

Definition at line 849 of file RelAlgExecutor.cpp.

References ColumnDescriptor::columnType, get_logical_type_info(), and Catalog_Namespace::Catalog::getMetadataForColumn().

Referenced by translate_scalar_sources_for_update(), and translate_targets_for_update().

852  {
853  const auto cd = *cat.getMetadataForColumn(tableId, colName);
854 
855  auto cast_ti = cd.columnType;
856 
857  // Type needs to be scrubbed because otherwise NULL values could get cut off or
858  // truncated
859  auto cast_logical_ti = get_logical_type_info(cast_ti);
860  if (cast_logical_ti.is_varlen() && cast_logical_ti.is_array()) {
861  return expr;
862  }
863 
864  // CastIR.cpp Executor::codegenCast() doesn't know how to cast from a ColumnVar
865  // so it CHECK's unless casting is skipped here.
866  if (std::dynamic_pointer_cast<Analyzer::ColumnVar>(expr)) {
867  return expr;
868  }
869 
870  // Cast the expression to match the type of the output column.
871  return expr->add_cast(cast_logical_ti);
872 }
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:869
const ColumnDescriptor * getMetadataForColumn(int tableId, const std::string &colName) const
SQLTypeInfo columnType

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void anonymous_namespace{RelAlgExecutor.cpp}::collect_used_input_desc ( std::vector< InputDescriptor > &  input_descs,
const Catalog_Namespace::Catalog cat,
std::unordered_set< std::shared_ptr< const InputColDescriptor >> &  input_col_descs_unique,
const RelAlgNode ra_node,
const std::unordered_set< const RexInput * > &  source_used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 674 of file RelAlgExecutor.cpp.

References get_data_sink(), get_non_join_sequence(), Catalog_Namespace::Catalog::getColumnIdBySpi(), and table_id_from_ra().

Referenced by get_input_desc_impl().

680  {
681  std::unordered_set<InputDescriptor> input_descs_unique(input_descs.begin(),
682  input_descs.end());
683  const auto non_join_src_seq = get_non_join_sequence(get_data_sink(ra_node));
684  std::unordered_map<const RelAlgNode*, int> non_join_to_nest_level;
685  for (const auto node : non_join_src_seq) {
686  non_join_to_nest_level.insert(std::make_pair(node, non_join_to_nest_level.size()));
687  }
688  for (const auto used_input : source_used_inputs) {
689  const auto input_ra = used_input->getSourceNode();
690  const int table_id = table_id_from_ra(input_ra);
691  const auto col_id = used_input->getIndex();
692  auto it = input_to_nest_level.find(input_ra);
693  if (it == input_to_nest_level.end()) {
694  throw std::runtime_error("Bushy joins not supported");
695  }
696  const int input_desc = it->second;
697  input_col_descs_unique.insert(std::make_shared<const InputColDescriptor>(
698  dynamic_cast<const RelScan*>(input_ra)
699  ? cat.getColumnIdBySpi(table_id, col_id + 1)
700  : col_id,
701  table_id,
702  input_desc));
703  }
704 }
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
std::vector< const RelAlgNode * > get_non_join_sequence(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1432

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::compute_output_buffer_size ( const RelAlgExecutionUnit ra_exe_unit)

Determines whether a query needs to compute the size of its output buffer. Returns true for projection queries with no LIMIT or a LIMIT that exceeds the high scan limit threshold (meaning it would be cheaper to compute the number of rows passing or use the bump allocator than allocate the current scan limit per GPU)

Definition at line 1819 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::groupby_exprs, Executor::high_scan_limit, RelAlgExecutionUnit::scan_limit, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1819  {
1820  for (const auto target_expr : ra_exe_unit.target_exprs) {
1821  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
1822  return false;
1823  }
1824  }
1825  if (ra_exe_unit.groupby_exprs.size() == 1 && !ra_exe_unit.groupby_exprs.front() &&
1826  (!ra_exe_unit.scan_limit || ra_exe_unit.scan_limit > Executor::high_scan_limit)) {
1827  return true;
1828  }
1829  return false;
1830 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
static const size_t high_scan_limit
Definition: Execute.h:468

+ Here is the caller graph for this function:

RelAlgExecutionUnit anonymous_namespace{RelAlgExecutor.cpp}::decide_approx_count_distinct_implementation ( const RelAlgExecutionUnit ra_exe_unit_in,
const std::vector< InputTableInfo > &  table_infos,
const Executor executor,
const ExecutorDeviceType  device_type_in,
std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned 
)

Definition at line 1837 of file RelAlgExecutor.cpp.

References Bitmap, CHECK(), CHECK_GE, g_bigint_count, g_cluster, g_hll_precision_bits, get_agg_type(), get_count_distinct_sub_bitmap_count(), get_target_info(), getExpressionRange(), GPU, hll_size_for_rate(), Integer, kAPPROX_COUNT_DISTINCT, kCOUNT, kENCODING_DICT, kINT, and RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit(), and RelAlgExecutor::handleOutOfMemoryRetry().

1842  {
1843  RelAlgExecutionUnit ra_exe_unit = ra_exe_unit_in;
1844  for (size_t i = 0; i < ra_exe_unit.target_exprs.size(); ++i) {
1845  const auto target_expr = ra_exe_unit.target_exprs[i];
1846  const auto agg_info = get_target_info(target_expr, g_bigint_count);
1847  if (agg_info.agg_kind != kAPPROX_COUNT_DISTINCT) {
1848  continue;
1849  }
1850  CHECK(dynamic_cast<const Analyzer::AggExpr*>(target_expr));
1851  const auto arg = static_cast<Analyzer::AggExpr*>(target_expr)->get_own_arg();
1852  CHECK(arg);
1853  const auto& arg_ti = arg->get_type_info();
1854  // Avoid calling getExpressionRange for variable length types (string and array),
1855  // it'd trigger an assertion since that API expects to be called only for types
1856  // for which the notion of range is well-defined. A bit of a kludge, but the
1857  // logic to reject these types anyway is at lower levels in the stack and not
1858  // really worth pulling into a separate function for now.
1859  if (!(arg_ti.is_number() || arg_ti.is_boolean() || arg_ti.is_time() ||
1860  (arg_ti.is_string() && arg_ti.get_compression() == kENCODING_DICT))) {
1861  continue;
1862  }
1863  const auto arg_range = getExpressionRange(arg.get(), table_infos, executor);
1864  if (arg_range.getType() != ExpressionRangeType::Integer) {
1865  continue;
1866  }
1867  // When running distributed, the threshold for using the precise implementation
1868  // must be consistent across all leaves, otherwise we could have a mix of precise
1869  // and approximate bitmaps and we cannot aggregate them.
1870  const auto device_type = g_cluster ? ExecutorDeviceType::GPU : device_type_in;
1871  const auto bitmap_sz_bits = arg_range.getIntMax() - arg_range.getIntMin() + 1;
1872  const auto sub_bitmap_count =
1873  get_count_distinct_sub_bitmap_count(bitmap_sz_bits, ra_exe_unit, device_type);
1874  int64_t approx_bitmap_sz_bits{0};
1875  const auto error_rate =
1876  static_cast<Analyzer::AggExpr*>(target_expr)->get_error_rate();
1877  if (error_rate) {
1878  CHECK(error_rate->get_type_info().get_type() == kINT);
1879  CHECK_GE(error_rate->get_constval().intval, 1);
1880  approx_bitmap_sz_bits = hll_size_for_rate(error_rate->get_constval().intval);
1881  } else {
1882  approx_bitmap_sz_bits = g_hll_precision_bits;
1883  }
1884  CountDistinctDescriptor approx_count_distinct_desc{CountDistinctImplType::Bitmap,
1885  arg_range.getIntMin(),
1886  approx_bitmap_sz_bits,
1887  true,
1888  device_type,
1889  sub_bitmap_count};
1890  CountDistinctDescriptor precise_count_distinct_desc{CountDistinctImplType::Bitmap,
1891  arg_range.getIntMin(),
1892  bitmap_sz_bits,
1893  false,
1894  device_type,
1895  sub_bitmap_count};
1896  if (approx_count_distinct_desc.bitmapPaddedSizeBytes() >=
1897  precise_count_distinct_desc.bitmapPaddedSizeBytes()) {
1898  auto precise_count_distinct = makeExpr<Analyzer::AggExpr>(
1899  get_agg_type(kCOUNT, arg.get()), kCOUNT, arg, true, nullptr);
1900  target_exprs_owned.push_back(precise_count_distinct);
1901  ra_exe_unit.target_exprs[i] = precise_count_distinct.get();
1902  }
1903  }
1904  return ra_exe_unit;
1905 }
std::vector< Analyzer::Expr * > target_exprs
int hll_size_for_rate(const int err_percent)
Definition: HyperLogLog.h:115
bool g_cluster
TargetInfo get_target_info(const PointerType target_expr, const bool bigint_count)
Definition: TargetInfo.h:65
#define CHECK_GE(x, y)
Definition: Logger.h:203
SQLTypeInfo get_agg_type(const SQLAgg agg_kind, const Analyzer::Expr *arg_expr)
int g_hll_precision_bits
size_t get_count_distinct_sub_bitmap_count(const size_t bitmap_sz_bits, const RelAlgExecutionUnit &ra_exe_unit, const ExecutorDeviceType device_type)
CHECK(cgen_state)
bool g_bigint_count
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
Definition: sqldefs.h:71
Definition: sqltypes.h:48

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::do_table_reordering ( std::vector< InputDescriptor > &  input_descs,
std::list< std::shared_ptr< const InputColDescriptor >> &  input_col_descs,
const JoinQualsPerNestingLevel left_deep_join_quals,
std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const RA *  node,
const std::vector< InputTableInfo > &  query_infos,
const Executor executor 
)

Definition at line 2447 of file RelAlgExecutor.cpp.

References CHECK(), g_cluster, get_input_desc(), get_input_nest_levels(), get_node_input_permutation(), and table_is_replicated().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

2454  {
2455  if (g_cluster) {
2456  // Disable table reordering in distributed mode. The aggregator does not have enough
2457  // information to break ties
2458  return {};
2459  }
2460  const auto& cat = *executor->getCatalog();
2461  for (const auto& table_info : query_infos) {
2462  if (table_info.table_id < 0) {
2463  continue;
2464  }
2465  const auto td = cat.getMetadataForTable(table_info.table_id);
2466  CHECK(td);
2467  if (table_is_replicated(td)) {
2468  return {};
2469  }
2470  }
2471  const auto input_permutation =
2472  get_node_input_permutation(left_deep_join_quals, query_infos, executor);
2473  input_to_nest_level = get_input_nest_levels(node, input_permutation);
2474  std::tie(input_descs, input_col_descs, std::ignore) =
2475  get_input_desc(node, input_to_nest_level, input_permutation, cat);
2476  return input_permutation;
2477 }
std::unordered_map< const RelAlgNode *, int > get_input_nest_levels(const RelAlgNode *ra_node, const std::vector< size_t > &input_permutation)
bool g_cluster
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
CHECK(cgen_state)
bool table_is_replicated(const TableDescriptor *td)
std::tuple< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > >, std::vector< std::shared_ptr< RexInput > > > get_input_desc(const RA *ra_node, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::exe_unit_has_quals ( const RelAlgExecutionUnit  ra_exe_unit)
inline

Definition at line 1832 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::join_quals, RelAlgExecutionUnit::quals, and RelAlgExecutionUnit::simple_quals.

Referenced by RelAlgExecutor::executeWorkUnit().

1832  {
1833  return !(ra_exe_unit.quals.empty() && ra_exe_unit.join_quals.empty() &&
1834  ra_exe_unit.simple_quals.empty());
1835 }
const JoinQualsPerNestingLevel join_quals
std::list< std::shared_ptr< Analyzer::Expr > > quals
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::first_oe_is_desc ( const std::list< Analyzer::OrderEntry > &  order_entries)

Definition at line 1625 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

1625  {
1626  return !order_entries.empty() && order_entries.front().is_desc;
1627 }

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals ( const RexScalar scalar)

Definition at line 2366 of file RelAlgExecutor.cpp.

References CHECK_EQ, kAND, kBW_EQ, kEQ, kISNULL, kOR, and RexVisitorBase< T >::visit().

Referenced by get_bitwise_equals_conjunction().

2366  {
2367  const auto condition = dynamic_cast<const RexOperator*>(scalar);
2368  if (!condition || condition->getOperator() != kOR || condition->size() != 2) {
2369  return nullptr;
2370  }
2371  const auto equi_join_condition =
2372  dynamic_cast<const RexOperator*>(condition->getOperand(0));
2373  if (!equi_join_condition || equi_join_condition->getOperator() != kEQ) {
2374  return nullptr;
2375  }
2376  const auto both_are_null_condition =
2377  dynamic_cast<const RexOperator*>(condition->getOperand(1));
2378  if (!both_are_null_condition || both_are_null_condition->getOperator() != kAND ||
2379  both_are_null_condition->size() != 2) {
2380  return nullptr;
2381  }
2382  const auto lhs_is_null =
2383  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(0));
2384  const auto rhs_is_null =
2385  dynamic_cast<const RexOperator*>(both_are_null_condition->getOperand(1));
2386  if (!lhs_is_null || !rhs_is_null || lhs_is_null->getOperator() != kISNULL ||
2387  rhs_is_null->getOperator() != kISNULL) {
2388  return nullptr;
2389  }
2390  CHECK_EQ(size_t(1), lhs_is_null->size());
2391  CHECK_EQ(size_t(1), rhs_is_null->size());
2392  CHECK_EQ(size_t(2), equi_join_condition->size());
2393  const auto eq_lhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(0));
2394  const auto eq_rhs = dynamic_cast<const RexInput*>(equi_join_condition->getOperand(1));
2395  const auto is_null_lhs = dynamic_cast<const RexInput*>(lhs_is_null->getOperand(0));
2396  const auto is_null_rhs = dynamic_cast<const RexInput*>(rhs_is_null->getOperand(0));
2397  if (!eq_lhs || !eq_rhs || !is_null_lhs || !is_null_rhs) {
2398  return nullptr;
2399  }
2400  std::vector<std::unique_ptr<const RexScalar>> eq_operands;
2401  if (*eq_lhs == *is_null_lhs && *eq_rhs == *is_null_rhs) {
2402  RexDeepCopyVisitor deep_copy_visitor;
2403  auto lhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(0));
2404  auto rhs_op_copy = deep_copy_visitor.visit(equi_join_condition->getOperand(1));
2405  eq_operands.emplace_back(lhs_op_copy.release());
2406  eq_operands.emplace_back(rhs_op_copy.release());
2407  return boost::make_unique<const RexOperator>(
2408  kBW_EQ, eq_operands, equi_join_condition->getType());
2409  }
2410  return nullptr;
2411 }
#define CHECK_EQ(x, y)
Definition: Logger.h:198
Definition: sqldefs.h:38
Definition: sqldefs.h:30
virtual T visit(const RexScalar *rex_scalar) const
Definition: RexVisitor.h:27
Definition: sqldefs.h:37
Definition: sqldefs.h:31

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unique_ptr<const RexOperator> anonymous_namespace{RelAlgExecutor.cpp}::get_bitwise_equals_conjunction ( const RexScalar scalar)

Definition at line 2413 of file RelAlgExecutor.cpp.

References CHECK_GE, get_bitwise_equals(), and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

2414  {
2415  const auto condition = dynamic_cast<const RexOperator*>(scalar);
2416  if (condition && condition->getOperator() == kAND) {
2417  CHECK_GE(condition->size(), size_t(2));
2418  auto acc = get_bitwise_equals(condition->getOperand(0));
2419  if (!acc) {
2420  return nullptr;
2421  }
2422  for (size_t i = 1; i < condition->size(); ++i) {
2423  std::vector<std::unique_ptr<const RexScalar>> and_operands;
2424  and_operands.emplace_back(std::move(acc));
2425  and_operands.emplace_back(get_bitwise_equals_conjunction(condition->getOperand(i)));
2426  acc =
2427  boost::make_unique<const RexOperator>(kAND, and_operands, condition->getType());
2428  }
2429  return acc;
2430  }
2431  return get_bitwise_equals(scalar);
2432 }
std::unique_ptr< const RexOperator > get_bitwise_equals_conjunction(const RexScalar *scalar)
#define CHECK_GE(x, y)
Definition: Logger.h:203
Definition: sqldefs.h:37
std::unique_ptr< const RexOperator > get_bitwise_equals(const RexScalar *scalar)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

// Returns the node whose inputs act as the data sources for `ra_node`: a
// RelJoin is its own sink (two inputs); otherwise, when the node's single
// input is a join (RelJoin or RelLeftDeepInnerJoin) that join is the sink,
// and in all remaining cases the node itself is.
const RelAlgNode* get_data_sink(const RelAlgNode* ra_node) {
  const auto join = dynamic_cast<const RelJoin*>(ra_node);
  if (join) {
    CHECK_EQ(size_t(2), join->inputCount());
    return join;
  }
  CHECK_EQ(size_t(1), ra_node->inputCount());
  const auto only_src = ra_node->getInput(0);
  if (dynamic_cast<const RelJoin*>(only_src) ||
      dynamic_cast<const RelLeftDeepInnerJoin*>(only_src)) {
    return only_src;
  }
  return ra_node;
}

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::tuple<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> >, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc ( const RA *  ra_node,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 767 of file RelAlgExecutor.cpp.

References get_input_desc_impl(), and get_used_inputs().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createModifyCompoundWorkUnit(), RelAlgExecutor::createModifyProjectWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and do_table_reordering().

770  {
771  std::unordered_set<const RexInput*> used_inputs;
772  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
773  std::tie(used_inputs, used_inputs_owned) = get_used_inputs(ra_node, cat);
774  auto input_desc_pair = get_input_desc_impl(
775  ra_node, used_inputs, input_to_nest_level, input_permutation, cat);
776  return std::make_tuple(
777  input_desc_pair.first, input_desc_pair.second, used_inputs_owned);
778 }
std::pair< std::vector< InputDescriptor >, std::list< std::shared_ptr< const InputColDescriptor > > > get_input_desc_impl(const RA *ra_node, const std::unordered_set< const RexInput * > &used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level, const std::vector< size_t > &input_permutation, const Catalog_Namespace::Catalog &cat)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_used_inputs(const RelCompound *compound, const Catalog_Namespace::Catalog &cat)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::pair<std::vector<InputDescriptor>, std::list<std::shared_ptr<const InputColDescriptor> > > anonymous_namespace{RelAlgExecutor.cpp}::get_input_desc_impl ( const RA *  ra_node,
const std::unordered_set< const RexInput * > &  used_inputs,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level,
const std::vector< size_t > &  input_permutation,
const Catalog_Namespace::Catalog cat 
)

Definition at line 709 of file RelAlgExecutor.cpp.

References collect_used_input_desc(), get_data_sink(), get_join_source_used_inputs(), InputDescriptor::getNestLevel(), and table_id_from_ra().

Referenced by get_input_desc().

713  {
714  std::vector<InputDescriptor> input_descs;
715  const auto data_sink_node = get_data_sink(ra_node);
716  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
717  const auto input_node_idx =
718  input_permutation.empty() ? input_idx : input_permutation[input_idx];
719  const auto input_ra = data_sink_node->getInput(input_node_idx);
720  const int table_id = table_id_from_ra(input_ra);
721  input_descs.emplace_back(table_id, input_idx);
722  }
723  std::sort(input_descs.begin(),
724  input_descs.end(),
725  [](const InputDescriptor& lhs, const InputDescriptor& rhs) {
726  return lhs.getNestLevel() < rhs.getNestLevel();
727  });
728  std::unordered_set<std::shared_ptr<const InputColDescriptor>> input_col_descs_unique;
729  collect_used_input_desc(input_descs,
730  cat,
731  input_col_descs_unique,
732  ra_node,
733  used_inputs,
734  input_to_nest_level);
735  std::unordered_set<const RexInput*> join_source_used_inputs;
736  std::vector<std::shared_ptr<RexInput>> join_source_used_inputs_owned;
737  std::tie(join_source_used_inputs, join_source_used_inputs_owned) =
738  get_join_source_used_inputs(ra_node, cat);
739  collect_used_input_desc(input_descs,
740  cat,
741  input_col_descs_unique,
742  ra_node,
743  join_source_used_inputs,
744  input_to_nest_level);
745  std::vector<std::shared_ptr<const InputColDescriptor>> input_col_descs(
746  input_col_descs_unique.begin(), input_col_descs_unique.end());
747 
748  std::sort(
749  input_col_descs.begin(),
750  input_col_descs.end(),
751  [](std::shared_ptr<const InputColDescriptor> const& lhs,
752  std::shared_ptr<const InputColDescriptor> const& rhs) {
753  if (lhs->getScanDesc().getNestLevel() == rhs->getScanDesc().getNestLevel()) {
754  return lhs->getColId() < rhs->getColId();
755  }
756  return lhs->getScanDesc().getNestLevel() < rhs->getScanDesc().getNestLevel();
757  });
758  return {input_descs,
759  std::list<std::shared_ptr<const InputColDescriptor>>(input_col_descs.begin(),
760  input_col_descs.end())};
761 }
void collect_used_input_desc(std::vector< InputDescriptor > &input_descs, const Catalog_Namespace::Catalog &cat, std::unordered_set< std::shared_ptr< const InputColDescriptor >> &input_col_descs_unique, const RelAlgNode *ra_node, const std::unordered_set< const RexInput * > &source_used_inputs, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::pair< std::unordered_set< const RexInput * >, std::vector< std::shared_ptr< RexInput > > > get_join_source_used_inputs(const RelAlgNode *ra_node, const Catalog_Namespace::Catalog &cat)
int table_id_from_ra(const RelAlgNode *ra_node)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)
int getNestLevel() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_map<const RelAlgNode*, int> anonymous_namespace{RelAlgExecutor.cpp}::get_input_nest_levels ( const RelAlgNode ra_node,
const std::vector< size_t > &  input_permutation 
)

Definition at line 605 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), logger::INFO, and LOG_IF.

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createModifyCompoundWorkUnit(), RelAlgExecutor::createModifyProjectWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and do_table_reordering().

607  {
608  const auto data_sink_node = get_data_sink(ra_node);
609  std::unordered_map<const RelAlgNode*, int> input_to_nest_level;
610  for (size_t input_idx = 0; input_idx < data_sink_node->inputCount(); ++input_idx) {
611  const auto input_node_idx =
612  input_permutation.empty() ? input_idx : input_permutation[input_idx];
613  const auto input_ra = data_sink_node->getInput(input_node_idx);
614  const auto it_ok = input_to_nest_level.emplace(input_ra, input_idx);
615  CHECK(it_ok.second);
616  LOG_IF(INFO, !input_permutation.empty())
617  << "Assigned input " << input_ra->toString() << " to nest level " << input_idx;
618  }
619  return input_to_nest_level;
620 }
#define LOG_IF(severity, condition)
Definition: Logger.h:276
CHECK(cgen_state)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::vector<TargetMetaInfo>, std::vector<std::shared_ptr<Analyzer::Expr> > > anonymous_namespace{RelAlgExecutor.cpp}::get_inputs_meta ( const RelFilter filter,
const RelAlgTranslator translator,
const std::vector< std::shared_ptr< RexInput >> &  inputs_owned,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 3157 of file RelAlgExecutor.cpp.

References CHECK(), get_data_sink(), get_exprs_not_owned(), get_targets_meta(), synthesize_inputs(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createFilterWorkUnit().

// Builds the (target metainfo, input expression) pair for a filter by walking
// every input of the filter's data sink in nest-level order.
// NOTE(review): inputs_owned is assumed to hold one RexInput per column of
// every source, in source order — input_it advances through it in lock-step
// with the columns visited; verify against the caller.
3160  {
3161  std::vector<TargetMetaInfo> in_metainfo;
3162  std::vector<std::shared_ptr<Analyzer::Expr>> exprs_owned;
3163  const auto data_sink_node = get_data_sink(filter);
3164  auto input_it = inputs_owned.begin();
3165  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
3166  const auto source = data_sink_node->getInput(nest_level);
3167  const auto scan_source = dynamic_cast<const RelScan*>(source);
3168  if (scan_source) {
      // Physical scan: it has no output metainfo yet, so derive targets by
      // translating the synthesized RexInputs for each of its columns.
3169  CHECK(source->getOutputMetainfo().empty());
3170  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources_owned;
3171  for (size_t i = 0; i < scan_source->size(); ++i, ++input_it) {
3172  scalar_sources_owned.push_back(translator.translateScalarRex(input_it->get()));
3173  }
3174  const auto source_metadata =
3175  get_targets_meta(scan_source, get_exprs_not_owned(scalar_sources_owned));
3176  in_metainfo.insert(
3177  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3178  exprs_owned.insert(
3179  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3180  } else {
      // Non-scan source: reuse its already-computed output metainfo and skip
      // the corresponding entries of inputs_owned.
3181  const auto& source_metadata = source->getOutputMetainfo();
3182  input_it += source_metadata.size();
3183  in_metainfo.insert(
3184  in_metainfo.end(), source_metadata.begin(), source_metadata.end());
3185  const auto scalar_sources_owned = synthesize_inputs(
3186  data_sink_node, nest_level, source_metadata, input_to_nest_level);
3187  exprs_owned.insert(
3188  exprs_owned.end(), scalar_sources_owned.begin(), scalar_sources_owned.end());
3189  }
3190  }
3191  return std::make_pair(in_metainfo, exprs_owned);
3192 }
std::vector< std::shared_ptr< Analyzer::Expr > > synthesize_inputs(const RelAlgNode *ra_node, const size_t nest_level, const std::vector< TargetMetaInfo > &in_metainfo, const std::unordered_map< const RelAlgNode *, int > &input_to_nest_level)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
std::vector< Analyzer::Expr * > get_exprs_not_owned(const std::vector< std::shared_ptr< Analyzer::Expr >> &exprs)
Definition: Execute.h:214
CHECK(cgen_state)
std::vector< TargetMetaInfo > get_targets_meta(const RA *ra_node, const std::vector< Analyzer::Expr * > &target_exprs)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_join_source_used_inputs ( const RelAlgNode ra_node,
const Catalog_Namespace::Catalog cat 
)

Definition at line 623 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, get_data_sink(), anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelAlgNode::inputCount(), join(), run_benchmark_import::result, and RexVisitorBase< T >::visit().

Referenced by get_input_desc_impl().

624  {
625  const auto data_sink_node = get_data_sink(ra_node);
626  if (auto join = dynamic_cast<const RelJoin*>(data_sink_node)) {
627  CHECK_EQ(join->inputCount(), 2u);
628  const auto condition = join->getCondition();
629  RexUsedInputsVisitor visitor(cat);
630  auto condition_inputs = visitor.visit(condition);
631  std::vector<std::shared_ptr<RexInput>> condition_inputs_owned(
632  visitor.get_inputs_owned());
633  return std::make_pair(condition_inputs, condition_inputs_owned);
634  }
635 
636  if (auto left_deep_join = dynamic_cast<const RelLeftDeepInnerJoin*>(data_sink_node)) {
637  CHECK_GE(left_deep_join->inputCount(), 2u);
638  const auto condition = left_deep_join->getInnerCondition();
639  RexUsedInputsVisitor visitor(cat);
640  auto result = visitor.visit(condition);
641  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
642  ++nesting_level) {
643  const auto outer_condition = left_deep_join->getOuterCondition(nesting_level);
644  if (outer_condition) {
645  const auto outer_result = visitor.visit(outer_condition);
646  result.insert(outer_result.begin(), outer_result.end());
647  }
648  }
649  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
650  return std::make_pair(result, used_inputs_owned);
651  }
652 
653  CHECK_EQ(ra_node->inputCount(), 1u);
654  return std::make_pair(std::unordered_set<const RexInput*>{},
655  std::vector<std::shared_ptr<RexInput>>{});
656 }
#define CHECK_EQ(x, y)
Definition: Logger.h:198
std::string join(T const &container, std::string const &delim)
#define CHECK_GE(x, y)
Definition: Logger.h:203
const size_t inputCount() const
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

JoinType anonymous_namespace{RelAlgExecutor.cpp}::get_join_type ( const RelAlgNode ra)

Definition at line 2354 of file RelAlgExecutor.cpp.

References get_data_sink(), INNER, INVALID, and join().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createFilterWorkUnit(), RelAlgExecutor::createModifyCompoundWorkUnit(), RelAlgExecutor::createModifyProjectWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

2354  {
2355  auto sink = get_data_sink(ra);
2356  if (auto join = dynamic_cast<const RelJoin*>(sink)) {
2357  return join->getJoinType();
2358  }
2359  if (dynamic_cast<const RelLeftDeepInnerJoin*>(sink)) {
2360  return JoinType::INNER;
2361  }
2362 
2363  return JoinType::INVALID;
2364 }
std::string join(T const &container, std::string const &delim)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<size_t> anonymous_namespace{RelAlgExecutor.cpp}::get_left_deep_join_input_sizes ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 2479 of file RelAlgExecutor.cpp.

References get_node_output(), RelAlgNode::getInput(), and RelAlgNode::inputCount().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createProjectWorkUnit().

2480  {
2481  std::vector<size_t> input_sizes;
2482  for (size_t i = 0; i < left_deep_join->inputCount(); ++i) {
2483  const auto inputs = get_node_output(left_deep_join->getInput(i));
2484  input_sizes.push_back(inputs.size());
2485  }
2486  return input_sizes;
2487 }
const RelAlgNode * getInput(const size_t idx) const
RANodeOutput get_node_output(const RelAlgNode *ra_node)
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

SQLTypeInfo anonymous_namespace{RelAlgExecutor.cpp}::get_logical_type_for_expr ( const Analyzer::Expr expr)
inline

Definition at line 1090 of file RelAlgExecutor.cpp.

References get_logical_type_info(), get_nullable_logical_type_info(), Analyzer::Expr::get_type_info(), is_agg(), is_count_distinct(), and kBIGINT.

Referenced by get_targets_meta().

1090  {
1091  if (is_count_distinct(&expr)) {
1092  return SQLTypeInfo(kBIGINT, false);
1093  } else if (is_agg(&expr)) {
1095  }
1096  return get_logical_type_info(expr.get_type_info());
1097 }
bool is_agg(const Analyzer::Expr *expr)
SQLTypeInfo get_nullable_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:884
SQLTypeInfo get_logical_type_info(const SQLTypeInfo &type_info)
Definition: sqltypes.h:869
bool is_count_distinct(const Analyzer::Expr *expr)
SQLTypeInfoCore< ArrayContextTypeSizer, ExecutorTypePackaging, DateTimeFacilities > SQLTypeInfo
Definition: sqltypes.h:852
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:78

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_modify_manipulated_targets_meta ( ModifyManipulationTarget const *  manip_node,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1073 of file RelAlgExecutor.cpp.

References CHECK(), ModifyManipulationTarget::getTargetColumnCount(), ModifyManipulationTarget::getTargetColumns(), is_count_distinct(), and kBIGINT.

Referenced by RelAlgExecutor::createModifyCompoundWorkUnit(), and RelAlgExecutor::createModifyProjectWorkUnit().

1075  {
1076  std::vector<TargetMetaInfo> targets_meta;
1077 
1078  for (int i = 0; i < (manip_node->getTargetColumnCount()); ++i) {
1079  CHECK(target_exprs[i]);
1080  // TODO(alex): remove the count distinct type fixup.
1081  targets_meta.emplace_back(manip_node->getTargetColumns()[i],
1082  is_count_distinct(target_exprs[i])
1083  ? SQLTypeInfo(kBIGINT, false)
1084  : target_exprs[i]->get_type_info());
1085  }
1086 
1087  return targets_meta;
1088 }
bool is_count_distinct(const Analyzer::Expr *expr)
CHECK(cgen_state)
SQLTypeInfoCore< ArrayContextTypeSizer, ExecutorTypePackaging, DateTimeFacilities > SQLTypeInfo
Definition: sqltypes.h:852

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<const RelAlgNode*> anonymous_namespace{RelAlgExecutor.cpp}::get_non_join_sequence ( const RelAlgNode ra)

Definition at line 658 of file RelAlgExecutor.cpp.

References CHECK_EQ, and join().

Referenced by collect_used_input_desc().

658  {
659  std::vector<const RelAlgNode*> seq;
660  for (auto join = dynamic_cast<const RelJoin*>(ra); join;
661  join = static_cast<const RelJoin*>(join->getInput(0))) {
662  CHECK_EQ(size_t(2), join->inputCount());
663  seq.emplace_back(join->getInput(1));
664  auto lhs = join->getInput(0);
665  if (!dynamic_cast<const RelJoin*>(lhs)) {
666  seq.emplace_back(lhs);
667  break;
668  }
669  }
670  std::reverse(seq.begin(), seq.end());
671  return seq;
672 }
#define CHECK_EQ(x, y)
Definition: Logger.h:198
std::string join(T const &container, std::string const &delim)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<Analyzer::OrderEntry> anonymous_namespace{RelAlgExecutor.cpp}::get_order_entries ( const RelSort sort)

Definition at line 1605 of file RelAlgExecutor.cpp.

References RelSort::collationCount(), Descending, First, RelSort::getCollation(), and run_benchmark_import::result.

Referenced by RelAlgExecutor::createSortInputWorkUnit(), and RelAlgExecutor::executeSort().

1605  {
1606  std::list<Analyzer::OrderEntry> result;
1607  for (size_t i = 0; i < sort->collationCount(); ++i) {
1608  const auto sort_field = sort->getCollation(i);
1609  result.emplace_back(sort_field.getField() + 1,
1610  sort_field.getSortDir() == SortDirection::Descending,
1611  sort_field.getNullsPosition() == NullSortedPosition::First);
1612  }
1613  return result;
1614 }
SortField getCollation(const size_t i) const
size_t collationCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::unordered_set<PhysicalInput> anonymous_namespace{RelAlgExecutor.cpp}::get_physical_inputs ( const Catalog_Namespace::Catalog cat,
const RelAlgNode ra 
)

Definition at line 52 of file RelAlgExecutor.cpp.

References get_physical_inputs(), and Catalog_Namespace::Catalog::getColumnIdBySpi().

54  {
55  auto phys_inputs = get_physical_inputs(ra);
56  std::unordered_set<PhysicalInput> phys_inputs2;
57  for (auto& phi : phys_inputs) {
58  phys_inputs2.insert(
59  PhysicalInput{cat.getColumnIdBySpi(phi.table_id, phi.col_id), phi.table_id});
60  }
61  return phys_inputs2;
62 }
std::unordered_set< PhysicalInput > get_physical_inputs(const RelAlgNode *ra)
const int getColumnIdBySpi(const int tableId, const size_t spi) const
Definition: Catalog.cpp:1432

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelCompound compound)

Definition at line 780 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSourcesSize().

Referenced by RelAlgExecutor::createModifyCompoundWorkUnit(), RelAlgExecutor::createModifyProjectWorkUnit(), translate_scalar_sources(), translate_scalar_sources_for_update(), and translate_targets_for_update().

780  {
781  return compound->getScalarSourcesSize();
782 }
const size_t getScalarSourcesSize() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelProject project)

Definition at line 784 of file RelAlgExecutor.cpp.

References RelProject::size().

784  {
785  return project->size();
786 }
size_t size() const override

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scalar_sources_size ( const RelTableFunction table_func)

Definition at line 788 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputsSize().

788  {
789  return table_func->getTableFuncInputsSize();
790 }
size_t getTableFuncInputsSize() const

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::get_scan_limit ( const RelAlgNode ra,
const size_t  limit 
)

Definition at line 1616 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::createSortInputWorkUnit().

1616  {
1617  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
1618  if (aggregate) {
1619  return 0;
1620  }
1621  const auto compound = dynamic_cast<const RelCompound*>(ra);
1622  return (compound && compound->isAggregate()) ? 0 : limit;
1623 }

+ Here is the caller graph for this function:

template<class RA >
std::vector<TargetMetaInfo> anonymous_namespace{RelAlgExecutor.cpp}::get_targets_meta ( const RA *  ra_node,
const std::vector< Analyzer::Expr * > &  target_exprs 
)

Definition at line 1100 of file RelAlgExecutor.cpp.

References CHECK(), and get_logical_type_for_expr().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), RelAlgExecutor::createTableFunctionWorkUnit(), and get_inputs_meta().

1102  {
1103  std::vector<TargetMetaInfo> targets_meta;
1104  for (size_t i = 0; i < ra_node->size(); ++i) {
1105  CHECK(target_exprs[i]);
1106  // TODO(alex): remove the count distinct type fixup.
1107  targets_meta.emplace_back(ra_node->getFieldName(i),
1108  get_logical_type_for_expr(*target_exprs[i]),
1109  target_exprs[i]->get_type_info());
1110  }
1111  return targets_meta;
1112 }
CHECK(cgen_state)
SQLTypeInfo get_logical_type_for_expr(const Analyzer::Expr &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelCompound compound,
const Catalog_Namespace::Catalog cat 
)

Definition at line 503 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelCompound::getFilterExpr(), RelCompound::getScalarSource(), RelCompound::getScalarSourcesSize(), and RexVisitorBase< T >::visit().

Referenced by get_input_desc().

503  {
504  RexUsedInputsVisitor visitor(cat);
505  const auto filter_expr = compound->getFilterExpr();
506  std::unordered_set<const RexInput*> used_inputs =
507  filter_expr ? visitor.visit(filter_expr) : std::unordered_set<const RexInput*>{};
508  const auto sources_size = compound->getScalarSourcesSize();
509  for (size_t i = 0; i < sources_size; ++i) {
510  const auto source_inputs = visitor.visit(compound->getScalarSource(i));
511  used_inputs.insert(source_inputs.begin(), source_inputs.end());
512  }
513  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
514  return std::make_pair(used_inputs, used_inputs_owned);
515 }
const RexScalar * getFilterExpr() const
const size_t getScalarSourcesSize() const
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelAggregate aggregate,
const Catalog_Namespace::Catalog cat 
)

Definition at line 518 of file RelAlgExecutor.cpp.

References CHECK_EQ, CHECK_GE, RelAggregate::getAggExprs(), RelAggregate::getGroupByCount(), RelAlgNode::getInput(), RelAlgNode::getOutputMetainfo(), and RelAlgNode::inputCount().

518  {
519  CHECK_EQ(size_t(1), aggregate->inputCount());
520  std::unordered_set<const RexInput*> used_inputs;
521  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
522  const auto source = aggregate->getInput(0);
523  const auto& in_metainfo = source->getOutputMetainfo();
524  const auto group_count = aggregate->getGroupByCount();
525  CHECK_GE(in_metainfo.size(), group_count);
526  for (size_t i = 0; i < group_count; ++i) {
527  auto synthesized_used_input = new RexInput(source, i);
528  used_inputs_owned.emplace_back(synthesized_used_input);
529  used_inputs.insert(synthesized_used_input);
530  }
531  for (const auto& agg_expr : aggregate->getAggExprs()) {
532  for (size_t i = 0; i < agg_expr->size(); ++i) {
533  const auto operand_idx = agg_expr->getOperand(i);
534  CHECK_GE(in_metainfo.size(), static_cast<size_t>(operand_idx));
535  auto synthesized_used_input = new RexInput(source, operand_idx);
536  used_inputs_owned.emplace_back(synthesized_used_input);
537  used_inputs.insert(synthesized_used_input);
538  }
539  }
540  return std::make_pair(used_inputs, used_inputs_owned);
541 }
const size_t getGroupByCount() const
#define CHECK_EQ(x, y)
Definition: Logger.h:198
#define CHECK_GE(x, y)
Definition: Logger.h:203
const RelAlgNode * getInput(const size_t idx) const
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
const size_t inputCount() const
const std::vector< TargetMetaInfo > & getOutputMetainfo() const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelProject project,
const Catalog_Namespace::Catalog cat 
)

Definition at line 544 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelProject::getProjectAt(), RelProject::size(), and RexVisitorBase< T >::visit().

544  {
545  RexUsedInputsVisitor visitor(cat);
546  std::unordered_set<const RexInput*> used_inputs;
547  for (size_t i = 0; i < project->size(); ++i) {
548  const auto proj_inputs = visitor.visit(project->getProjectAt(i));
549  used_inputs.insert(proj_inputs.begin(), proj_inputs.end());
550  }
551  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
552  return std::make_pair(used_inputs, used_inputs_owned);
553 }
size_t size() const override
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelTableFunction table_func,
const Catalog_Namespace::Catalog cat 
)

Definition at line 556 of file RelAlgExecutor.cpp.

References anonymous_namespace{RelAlgExecutor.cpp}::RexUsedInputsVisitor::get_inputs_owned(), RelTableFunction::getTableFuncInputAt(), RelTableFunction::getTableFuncInputsSize(), and RexVisitorBase< T >::visit().

557  {
558  RexUsedInputsVisitor visitor(cat);
559  std::unordered_set<const RexInput*> used_inputs;
560  for (size_t i = 0; i < table_func->getTableFuncInputsSize(); ++i) {
561  const auto table_func_inputs = visitor.visit(table_func->getTableFuncInputAt(i));
562  used_inputs.insert(table_func_inputs.begin(), table_func_inputs.end());
563  }
564  std::vector<std::shared_ptr<RexInput>> used_inputs_owned(visitor.get_inputs_owned());
565  return std::make_pair(used_inputs, used_inputs_owned);
566 }
size_t getTableFuncInputsSize() const
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::pair<std::unordered_set<const RexInput*>, std::vector<std::shared_ptr<RexInput> > > anonymous_namespace{RelAlgExecutor.cpp}::get_used_inputs ( const RelFilter filter,
const Catalog_Namespace::Catalog cat 
)

Definition at line 569 of file RelAlgExecutor.cpp.

References CHECK(), and get_data_sink().

569  {
570  std::unordered_set<const RexInput*> used_inputs;
571  std::vector<std::shared_ptr<RexInput>> used_inputs_owned;
572  const auto data_sink_node = get_data_sink(filter);
573  for (size_t nest_level = 0; nest_level < data_sink_node->inputCount(); ++nest_level) {
574  const auto source = data_sink_node->getInput(nest_level);
575  const auto scan_source = dynamic_cast<const RelScan*>(source);
576  if (scan_source) {
577  CHECK(source->getOutputMetainfo().empty());
578  for (size_t i = 0; i < scan_source->size(); ++i) {
579  auto synthesized_used_input = new RexInput(scan_source, i);
580  used_inputs_owned.emplace_back(synthesized_used_input);
581  used_inputs.insert(synthesized_used_input);
582  }
583  } else {
584  const auto& partial_in_metadata = source->getOutputMetainfo();
585  for (size_t i = 0; i < partial_in_metadata.size(); ++i) {
586  auto synthesized_used_input = new RexInput(source, i);
587  used_inputs_owned.emplace_back(synthesized_used_input);
588  used_inputs.insert(synthesized_used_input);
589  }
590  }
591  }
592  return std::make_pair(used_inputs, used_inputs_owned);
593 }
CHECK(cgen_state)
const RelAlgNode * get_data_sink(const RelAlgNode *ra_node)

+ Here is the call graph for this function:

size_t anonymous_namespace{RelAlgExecutor.cpp}::groups_approx_upper_bound ( const std::vector< InputTableInfo > &  table_infos)

Upper bound estimation for the number of groups. Not strictly correct and not tight, but if the tables involved are really small we shouldn't waste time doing the NDV estimation. We don't account for cross-joins and / or group by unnested array, which is the reason this estimation isn't entirely reliable.

Definition at line 1801 of file RelAlgExecutor.cpp.

References CHECK().

Referenced by RelAlgExecutor::executeWorkUnit().

1801  {
1802  CHECK(!table_infos.empty());
1803  const auto& first_table = table_infos.front();
1804  size_t max_num_groups = first_table.info.getNumTuplesUpperBound();
1805  for (const auto& table_info : table_infos) {
1806  if (table_info.info.getNumTuplesUpperBound() > max_num_groups) {
1807  max_num_groups = table_info.info.getNumTuplesUpperBound();
1808  }
1809  }
1810  return std::max(max_num_groups, size_t(1));
1811 }
CHECK(cgen_state)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_agg ( const Analyzer::Expr expr)

Definition at line 1061 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_aggtype(), kAVG, kMAX, kMIN, and kSUM.

Referenced by anonymous_namespace{RelAlgAbstractInterpreter.cpp}::create_compound(), RelAlgExecutor::executeWorkUnit(), get_logical_type_for_expr(), ResultSet::getSingleSlotTargetBitmap(), and Planner::Optimizer::optimize_aggs().

1061  {
1062  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1063  if (agg_expr && agg_expr->get_contains_agg()) {
1064  auto agg_type = agg_expr->get_aggtype();
1065  if (agg_type == SQLAgg::kMIN || agg_type == SQLAgg::kMAX ||
1066  agg_type == SQLAgg::kSUM || agg_type == SQLAgg::kAVG) {
1067  return true;
1068  }
1069  }
1070  return false;
1071 }
Definition: sqldefs.h:71
Definition: sqldefs.h:71
SQLAgg get_aggtype() const
Definition: Analyzer.h:1044
Definition: sqldefs.h:71
Definition: sqldefs.h:71

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_count_distinct ( const Analyzer::Expr expr)

Definition at line 1056 of file RelAlgExecutor.cpp.

References Analyzer::AggExpr::get_is_distinct().

Referenced by get_logical_type_for_expr(), and get_modify_manipulated_targets_meta().

1056  {
1057  const auto agg_expr = dynamic_cast<const Analyzer::AggExpr*>(expr);
1058  return agg_expr && agg_expr->get_is_distinct();
1059 }
bool get_is_distinct() const
Definition: Analyzer.h:1047

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::is_window_execution_unit ( const RelAlgExecutionUnit ra_exe_unit)

Definition at line 1329 of file RelAlgExecutor.cpp.

References RelAlgExecutionUnit::target_exprs.

Referenced by RelAlgExecutor::executeWorkUnit().

1329  {
1330  return std::any_of(ra_exe_unit.target_exprs.begin(),
1331  ra_exe_unit.target_exprs.end(),
1332  [](const Analyzer::Expr* expr) {
1333  return dynamic_cast<const Analyzer::WindowFunction*>(expr);
1334  });
1335 }
std::vector< Analyzer::Expr * > target_exprs

+ Here is the caller graph for this function:

std::vector<JoinType> anonymous_namespace{RelAlgExecutor.cpp}::left_deep_join_types ( const RelLeftDeepInnerJoin left_deep_join)

Definition at line 2434 of file RelAlgExecutor.cpp.

References CHECK_GE, RelLeftDeepInnerJoin::getOuterCondition(), INNER, RelAlgNode::inputCount(), and LEFT.

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createModifyCompoundWorkUnit(), RelAlgExecutor::createModifyProjectWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::translateLeftDeepJoinFilter().

2434  {
2435  CHECK_GE(left_deep_join->inputCount(), size_t(2));
2436  std::vector<JoinType> join_types(left_deep_join->inputCount() - 1, JoinType::INNER);
2437  for (size_t nesting_level = 1; nesting_level <= left_deep_join->inputCount() - 1;
2438  ++nesting_level) {
2439  if (left_deep_join->getOuterCondition(nesting_level)) {
2440  join_types[nesting_level - 1] = JoinType::LEFT;
2441  }
2442  }
2443  return join_types;
2444 }
const RexScalar * getOuterCondition(const size_t nesting_level) const
#define CHECK_GE(x, y)
Definition: Logger.h:203
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class QualsList >
bool anonymous_namespace{RelAlgExecutor.cpp}::list_contains_expression ( const QualsList &  haystack,
const std::shared_ptr< Analyzer::Expr > &  needle 
)

Definition at line 2714 of file RelAlgExecutor.cpp.

Referenced by reverse_logical_distribution().

2715  {
2716  for (const auto& qual : haystack) {
2717  if (*qual == *needle) {
2718  return true;
2719  }
2720  }
2721  return false;
2722 }

+ Here is the caller graph for this function:

bool anonymous_namespace{RelAlgExecutor.cpp}::node_is_aggregate ( const RelAlgNode ra)

Definition at line 46 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::executeRelAlgQuerySingleStep(), and RelAlgExecutor::executeSort().

46  {
47  const auto compound = dynamic_cast<const RelCompound*>(ra);
48  const auto aggregate = dynamic_cast<const RelAggregate*>(ra);
49  return ((compound && compound->isAggregate()) || aggregate);
50 }

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::reverse_logical_distribution ( const std::shared_ptr< Analyzer::Expr > &  expr)

Definition at line 2727 of file RelAlgExecutor.cpp.

References build_logical_expression(), CHECK_GE, kAND, kONE, kOR, list_contains_expression(), Parser::OperExpr::normalize(), qual_to_conjunctive_form(), and qual_to_disjunctive_form().

Referenced by RelAlgExecutor::makeJoinQuals().

2728  {
2729  const auto expr_terms = qual_to_disjunctive_form(expr);
2730  CHECK_GE(expr_terms.size(), size_t(1));
2731  const auto& first_term = expr_terms.front();
2732  const auto first_term_factors = qual_to_conjunctive_form(first_term);
2733  std::vector<std::shared_ptr<Analyzer::Expr>> common_factors;
2734  // First, collect the conjunctive components common to all the disjunctive components.
2735  // Don't do it for simple qualifiers, we only care about expensive or join qualifiers.
2736  for (const auto& first_term_factor : first_term_factors.quals) {
2737  bool is_common =
2738  expr_terms.size() > 1; // Only report common factors for disjunction.
2739  for (size_t i = 1; i < expr_terms.size(); ++i) {
2740  const auto crt_term_factors = qual_to_conjunctive_form(expr_terms[i]);
2741  if (!list_contains_expression(crt_term_factors.quals, first_term_factor)) {
2742  is_common = false;
2743  break;
2744  }
2745  }
2746  if (is_common) {
2747  common_factors.push_back(first_term_factor);
2748  }
2749  }
2750  if (common_factors.empty()) {
2751  return expr;
2752  }
2753  // Now that the common expressions are known, collect the remaining expressions.
2754  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_terms;
2755  for (const auto& term : expr_terms) {
2756  const auto term_cf = qual_to_conjunctive_form(term);
2757  std::vector<std::shared_ptr<Analyzer::Expr>> remaining_quals(
2758  term_cf.simple_quals.begin(), term_cf.simple_quals.end());
2759  for (const auto& qual : term_cf.quals) {
2760  if (!list_contains_expression(common_factors, qual)) {
2761  remaining_quals.push_back(qual);
2762  }
2763  }
2764  if (!remaining_quals.empty()) {
2765  remaining_terms.push_back(build_logical_expression(remaining_quals, kAND));
2766  }
2767  }
2768  // Reconstruct the expression with the transformation applied.
2769  const auto common_expr = build_logical_expression(common_factors, kAND);
2770  if (remaining_terms.empty()) {
2771  return common_expr;
2772  }
2773  const auto remaining_expr = build_logical_expression(remaining_terms, kOR);
2774  return Parser::OperExpr::normalize(kAND, kONE, common_expr, remaining_expr);
2775 }
Definition: sqldefs.h:38
#define CHECK_GE(x, y)
Definition: Logger.h:203
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
bool list_contains_expression(const QualsList &haystack, const std::shared_ptr< Analyzer::Expr > &needle)
Definition: sqldefs.h:37
std::shared_ptr< Analyzer::Expr > build_logical_expression(const std::vector< std::shared_ptr< Analyzer::Expr >> &factors, const SQLOps sql_op)
static std::shared_ptr< Analyzer::Expr > normalize(const SQLOps optype, const SQLQualifier qual, std::shared_ptr< Analyzer::Expr > left_expr, std::shared_ptr< Analyzer::Expr > right_expr)
Definition: ParserNode.cpp:261
Definition: sqldefs.h:69
std::vector< std::shared_ptr< Analyzer::Expr > > qual_to_disjunctive_form(const std::shared_ptr< Analyzer::Expr > &qual_expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::rewrite_quals ( const std::list< std::shared_ptr< Analyzer::Expr >> &  quals)

Definition at line 2489 of file RelAlgExecutor.cpp.

References rewrite_expr().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createModifyCompoundWorkUnit().

2490  {
2491  std::list<std::shared_ptr<Analyzer::Expr>> rewritten_quals;
2492  for (const auto& qual : quals) {
2493  const auto rewritten_qual = rewrite_expr(qual.get());
2494  rewritten_quals.push_back(rewritten_qual ? rewritten_qual : qual);
2495  }
2496  return rewritten_quals;
2497 }
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<const RexScalar*> anonymous_namespace{RelAlgExecutor.cpp}::rex_to_conjunctive_form ( const RexScalar qual_expr)

Definition at line 2687 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, and kAND.

Referenced by RelAlgExecutor::makeJoinQuals().

2687  {
2688  CHECK(qual_expr);
2689  const auto bin_oper = dynamic_cast<const RexOperator*>(qual_expr);
2690  if (!bin_oper || bin_oper->getOperator() != kAND) {
2691  return {qual_expr};
2692  }
2693  CHECK_GE(bin_oper->size(), size_t(2));
2694  auto lhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(0));
2695  for (size_t i = 1; i < bin_oper->size(); ++i) {
2696  const auto rhs_cf = rex_to_conjunctive_form(bin_oper->getOperand(i));
2697  lhs_cf.insert(lhs_cf.end(), rhs_cf.begin(), rhs_cf.end());
2698  }
2699  return lhs_cf;
2700 }
#define CHECK_GE(x, y)
Definition: Logger.h:203
std::vector< const RexScalar * > rex_to_conjunctive_form(const RexScalar *qual_expr)
CHECK(cgen_state)
Definition: sqldefs.h:37

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelCompound compound 
)

Definition at line 792 of file RelAlgExecutor.cpp.

References RelCompound::getScalarSource().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

792  {
793  return compound->getScalarSource(i);
794 }
const RexScalar * getScalarSource(const size_t i) const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelProject project 
)

Definition at line 796 of file RelAlgExecutor.cpp.

References RelProject::getProjectAt().

796  {
797  return project->getProjectAt(i);
798 }
const RexScalar * getProjectAt(const size_t idx) const

+ Here is the call graph for this function:

const RexScalar* anonymous_namespace{RelAlgExecutor.cpp}::scalar_at ( const size_t  i,
const RelTableFunction table_func 
)

Definition at line 800 of file RelAlgExecutor.cpp.

References RelTableFunction::getTableFuncInputAt().

800  {
801  return table_func->getTableFuncInputAt(i);
802 }
const RexScalar * getTableFuncInputAt(const size_t idx) const

+ Here is the call graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict ( const std::shared_ptr< Analyzer::Expr expr)

Definition at line 804 of file RelAlgExecutor.cpp.

References kENCODING_DICT, kENCODING_NONE, and TRANSIENT_DICT_ID.

805  {
806  const auto& ti = expr->get_type_info();
807  if (!ti.is_string() || ti.get_compression() != kENCODING_NONE) {
808  return expr;
809  }
810  auto transient_dict_ti = ti;
811  transient_dict_ti.set_compression(kENCODING_DICT);
812  transient_dict_ti.set_comp_param(TRANSIENT_DICT_ID);
813  transient_dict_ti.set_fixed_size();
814  return expr->add_cast(transient_dict_ti);
815 }
#define TRANSIENT_DICT_ID
Definition: sqltypes.h:189
void anonymous_namespace{RelAlgExecutor.cpp}::set_transient_dict_maybe ( std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::shared_ptr< Analyzer::Expr > &  expr 
)

Definition at line 817 of file RelAlgExecutor.cpp.

References fold_expr(), and anonymous_namespace{CalciteAdapter.cpp}::set_transient_dict().

Referenced by translate_scalar_sources(), and translate_scalar_sources_for_update().

819  {
820  try {
821  scalar_sources.push_back(set_transient_dict(fold_expr(expr.get())));
822  } catch (...) {
823  scalar_sources.push_back(fold_expr(expr.get()));
824  }
825 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::synthesize_inputs ( const RelAlgNode ra_node,
const size_t  nest_level,
const std::vector< TargetMetaInfo > &  in_metainfo,
const std::unordered_map< const RelAlgNode *, int > &  input_to_nest_level 
)

Definition at line 2854 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, CHECK_LE, RelAlgNode::getInput(), RelAlgNode::inputCount(), and table_id_from_ra().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and get_inputs_meta().

2858  {
2859  CHECK_LE(size_t(1), ra_node->inputCount());
2860  CHECK_GE(size_t(2), ra_node->inputCount());
2861  const auto input = ra_node->getInput(nest_level);
2862  const auto it_rte_idx = input_to_nest_level.find(input);
2863  CHECK(it_rte_idx != input_to_nest_level.end());
2864  const int rte_idx = it_rte_idx->second;
2865  const int table_id = table_id_from_ra(input);
2866  std::vector<std::shared_ptr<Analyzer::Expr>> inputs;
2867  const auto scan_ra = dynamic_cast<const RelScan*>(input);
2868  int input_idx = 0;
2869  for (const auto& input_meta : in_metainfo) {
2870  inputs.push_back(
2871  std::make_shared<Analyzer::ColumnVar>(input_meta.get_type_info(),
2872  table_id,
2873  scan_ra ? input_idx + 1 : input_idx,
2874  rte_idx));
2875  ++input_idx;
2876  }
2877  return inputs;
2878 }
#define CHECK_GE(x, y)
Definition: Logger.h:203
CHECK(cgen_state)
const RelAlgNode * getInput(const size_t idx) const
#define CHECK_LE(x, y)
Definition: Logger.h:201
int table_id_from_ra(const RelAlgNode *ra_node)
const size_t inputCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

int anonymous_namespace{RelAlgExecutor.cpp}::table_id_from_ra ( const RelAlgNode ra_node)

Definition at line 595 of file RelAlgExecutor.cpp.

References CHECK(), RelAlgNode::getId(), and RelScan::getTableDescriptor().

Referenced by collect_used_input_desc(), get_input_desc_impl(), and synthesize_inputs().

595  {
596  const auto scan_ra = dynamic_cast<const RelScan*>(ra_node);
597  if (scan_ra) {
598  const auto td = scan_ra->getTableDescriptor();
599  CHECK(td);
600  return td->tableId;
601  }
602  return -ra_node->getId();
603 }
CHECK(cgen_state)
unsigned getId() const
const TableDescriptor * getTableDescriptor() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::shared_ptr<Analyzer::Expr> anonymous_namespace{RelAlgExecutor.cpp}::transform_to_inner ( const Analyzer::Expr expr)

Definition at line 1418 of file RelAlgExecutor.cpp.

Referenced by RelAlgExecutor::computeWindow().

1418  {
1419  const auto tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr);
1420  if (tuple) {
1421  std::vector<std::shared_ptr<Analyzer::Expr>> transformed_tuple;
1422  for (const auto& element : tuple->getTuple()) {
1423  transformed_tuple.push_back(transform_to_inner(element.get()));
1424  }
1425  return makeExpr<Analyzer::ExpressionTuple>(transformed_tuple);
1426  }
1427  const auto col = dynamic_cast<const Analyzer::ColumnVar*>(expr);
1428  if (!col) {
1429  throw std::runtime_error("Only columns supported in the window partition for now");
1430  }
1431  return makeExpr<Analyzer::ColumnVar>(
1432  col->get_type_info(), col->get_table_id(), col->get_column_id(), 1);
1433 }
std::shared_ptr< Analyzer::Expr > transform_to_inner(const Analyzer::Expr *expr)

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelCompound compound,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 908 of file RelAlgExecutor.cpp.

References RelCompound::getGroupByCount(), RelCompound::isAggregate(), and anonymous_namespace{CalciteAdapter.cpp}::set_transient_dict().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createModifyCompoundWorkUnit().

910  {
911  if (!compound->isAggregate()) {
912  return {nullptr};
913  }
914  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
915  for (size_t group_idx = 0; group_idx < compound->getGroupByCount(); ++group_idx) {
916  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
917  }
918  return groupby_exprs;
919 }
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
const size_t getGroupByCount() const

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::list<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_groupby_exprs ( const RelAggregate aggregate,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources 
)

Definition at line 921 of file RelAlgExecutor.cpp.

References RelAggregate::getGroupByCount(), and anonymous_namespace{CalciteAdapter.cpp}::set_transient_dict().

923  {
924  std::list<std::shared_ptr<Analyzer::Expr>> groupby_exprs;
925  for (size_t group_idx = 0; group_idx < aggregate->getGroupByCount(); ++group_idx) {
926  groupby_exprs.push_back(set_transient_dict(scalar_sources[group_idx]));
927  }
928  return groupby_exprs;
929 }
const size_t getGroupByCount() const
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)

+ Here is the call graph for this function:

QualsConjunctiveForm anonymous_namespace{RelAlgExecutor.cpp}::translate_quals ( const RelCompound compound,
const RelAlgTranslator translator 
)

Definition at line 931 of file RelAlgExecutor.cpp.

References fold_expr(), RelCompound::getFilterExpr(), qual_to_conjunctive_form(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), and RelAlgExecutor::createModifyCompoundWorkUnit().

932  {
933  const auto filter_rex = compound->getFilterExpr();
934  const auto filter_expr =
935  filter_rex ? translator.translateScalarRex(filter_rex) : nullptr;
935  return filter_expr ? qual_to_conjunctive_form(fold_expr(filter_expr.get()))
936                      : QualsConjunctiveForm{};
938 }
const RexScalar * getFilterExpr() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
QualsConjunctiveForm qual_to_conjunctive_form(const std::shared_ptr< Analyzer::Expr > qual_expr)
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources ( const RA *  ra_node,
const RelAlgTranslator translator 
)

Definition at line 828 of file RelAlgExecutor.cpp.

References get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createCompoundWorkUnit(), RelAlgExecutor::createProjectWorkUnit(), and RelAlgExecutor::createTableFunctionWorkUnit().

830  {
831  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
832  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
833  const auto scalar_rex = scalar_at(i, ra_node);
834  if (dynamic_cast<const RexRef*>(scalar_rex)) {
835  // RexRef are synthetic scalars we append at the end of the real ones
836  // for the sake of taking memory ownership, no real work needed here.
837  continue;
838  }
839 
840  const auto scalar_expr =
841  rewrite_array_elements(translator.translateScalarRex(scalar_rex).get());
842  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
843  set_transient_dict_maybe(scalar_sources, rewritten_expr);
844  }
845 
846  return scalar_sources;
847 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

template<class RA >
std::vector<std::shared_ptr<Analyzer::Expr> > anonymous_namespace{RelAlgExecutor.cpp}::translate_scalar_sources_for_update ( const RA *  ra_node,
const RelAlgTranslator translator,
int32_t  tableId,
const Catalog_Namespace::Catalog cat,
const ColumnNameList colNames,
size_t  starting_projection_column_idx 
)

Definition at line 875 of file RelAlgExecutor.cpp.

References cast_to_column_type(), get_scalar_sources_size(), rewrite_array_elements(), rewrite_expr(), scalar_at(), set_transient_dict_maybe(), and RelAlgTranslator::translateScalarRex().

Referenced by RelAlgExecutor::createModifyCompoundWorkUnit(), and RelAlgExecutor::createModifyProjectWorkUnit().

881  {
882  std::vector<std::shared_ptr<Analyzer::Expr>> scalar_sources;
883  for (size_t i = 0; i < get_scalar_sources_size(ra_node); ++i) {
884  const auto scalar_rex = scalar_at(i, ra_node);
885  if (dynamic_cast<const RexRef*>(scalar_rex)) {
886  // RexRef are synthetic scalars we append at the end of the real ones
887  // for the sake of taking memory ownership, no real work needed here.
888  continue;
889  }
890 
891  std::shared_ptr<Analyzer::Expr> translated_expr;
892  if (i >= starting_projection_column_idx && i < get_scalar_sources_size(ra_node) - 1) {
893  translated_expr = cast_to_column_type(translator.translateScalarRex(scalar_rex),
894  tableId,
895  cat,
896  colNames[i - starting_projection_column_idx]);
897  } else {
898  translated_expr = translator.translateScalarRex(scalar_rex);
899  }
900  const auto scalar_expr = rewrite_array_elements(translated_expr.get());
901  const auto rewritten_expr = rewrite_expr(scalar_expr.get());
902  set_transient_dict_maybe(scalar_sources, rewritten_expr);
903  }
904 
905  return scalar_sources;
906 }
Analyzer::ExpressionPtr rewrite_array_elements(Analyzer::Expr const *expr)
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t get_scalar_sources_size(const RelCompound *compound)
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
std::shared_ptr< Analyzer::Expr > cast_to_column_type(std::shared_ptr< Analyzer::Expr > expr, int32_t tableId, const Catalog_Namespace::Catalog &cat, const std::string &colName)
const RexScalar * scalar_at(const size_t i, const RelCompound *compound)
void set_transient_dict_maybe(std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources, const std::shared_ptr< Analyzer::Expr > &expr)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound compound,
const RelAlgTranslator translator 
)

Definition at line 940 of file RelAlgExecutor.cpp.

References CHECK(), CHECK_GE, CHECK_LE, fold_expr(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, rewrite_expr(), anonymous_namespace{CalciteAdapter.cpp}::set_transient_dict(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

Referenced by RelAlgExecutor::createAggregateWorkUnit(), and RelAlgExecutor::createCompoundWorkUnit().

945  {
946  std::vector<Analyzer::Expr*> target_exprs;
947  for (size_t i = 0; i < compound->size(); ++i) {
948  const auto target_rex = compound->getTargetExpr(i);
949  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
950  std::shared_ptr<Analyzer::Expr> target_expr;
951  if (target_rex_agg) {
952  target_expr =
953  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
954  } else {
955  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
956  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
957  if (target_rex_ref) {
958  const auto ref_idx = target_rex_ref->getIndex();
959  CHECK_GE(ref_idx, size_t(1));
960  CHECK_LE(ref_idx, groupby_exprs.size());
961  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
962  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
963  } else {
964  target_expr = translator.translateScalarRex(target_rex_scalar);
965  auto rewritten_expr = rewrite_expr(target_expr.get());
966  target_expr = fold_expr(rewritten_expr.get());
967  try {
968  target_expr = set_transient_dict(target_expr);
969  } catch (...) {
970  // noop
971  }
972  }
973  }
974  CHECK(target_expr);
975  target_exprs_owned.push_back(target_expr);
976  target_exprs.push_back(target_expr.get());
977  }
978  return target_exprs;
979 }
const Rex * getTargetExpr(const size_t i) const
std::shared_ptr< Analyzer::Expr > set_transient_dict(const std::shared_ptr< Analyzer::Expr > expr)
size_t getIndex() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t size() const override
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1585
#define CHECK_GE(x, y)
Definition: Logger.h:203
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
CHECK(cgen_state)
#define CHECK_LE(x, y)
Definition: Logger.h:201
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelAggregate aggregate,
const RelAlgTranslator translator 
)

Definition at line 981 of file RelAlgExecutor.cpp.

References CHECK(), fold_expr(), RelAggregate::getAggExprs(), Analyzer::Var::kGROUPBY, RelAlgTranslator::translateAggregateRex(), and var_ref().

986  {
987  std::vector<Analyzer::Expr*> target_exprs;
988  size_t group_key_idx = 0;
989  for (const auto& groupby_expr : groupby_exprs) {
990  auto target_expr =
991  var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, group_key_idx++);
992  target_exprs_owned.push_back(target_expr);
993  target_exprs.push_back(target_expr.get());
994  }
995 
996  for (const auto& target_rex_agg : aggregate->getAggExprs()) {
997  auto target_expr =
998  RelAlgTranslator::translateAggregateRex(target_rex_agg.get(), scalar_sources);
999  CHECK(target_expr);
1000  target_expr = fold_expr(target_expr.get());
1001  target_exprs_owned.push_back(target_expr);
1002  target_exprs.push_back(target_expr.get());
1003  }
1004  return target_exprs;
1005 }
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1585
CHECK(cgen_state)
const std::vector< std::unique_ptr< const RexAgg > > & getAggExprs() const
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function:

std::vector<Analyzer::Expr*> anonymous_namespace{RelAlgExecutor.cpp}::translate_targets_for_update ( std::vector< std::shared_ptr< Analyzer::Expr >> &  target_exprs_owned,
const std::vector< std::shared_ptr< Analyzer::Expr >> &  scalar_sources,
const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const RelCompound compound,
const RelAlgTranslator translator,
int32_t  tableId,
const Catalog_Namespace::Catalog cat,
const ColumnNameList colNames,
size_t  starting_projection_column_idx 
)

Definition at line 1007 of file RelAlgExecutor.cpp.

References cast_to_column_type(), CHECK(), CHECK_GE, CHECK_LE, fold_expr(), get_scalar_sources_size(), RexRef::getIndex(), RelCompound::getTargetExpr(), Analyzer::Var::kGROUPBY, rewrite_expr(), RelCompound::size(), RelAlgTranslator::translateAggregateRex(), RelAlgTranslator::translateScalarRex(), and var_ref().

Referenced by RelAlgExecutor::createModifyCompoundWorkUnit().

1016  {
1017  std::vector<Analyzer::Expr*> target_exprs;
1018  for (size_t i = 0; i < compound->size(); ++i) {
1019  const auto target_rex = compound->getTargetExpr(i);
1020  const auto target_rex_agg = dynamic_cast<const RexAgg*>(target_rex);
1021  std::shared_ptr<Analyzer::Expr> target_expr;
1022  if (target_rex_agg) {
1023  target_expr =
1024  RelAlgTranslator::translateAggregateRex(target_rex_agg, scalar_sources);
1025  } else {
1026  const auto target_rex_scalar = dynamic_cast<const RexScalar*>(target_rex);
1027  const auto target_rex_ref = dynamic_cast<const RexRef*>(target_rex_scalar);
1028  if (target_rex_ref) {
1029  const auto ref_idx = target_rex_ref->getIndex();
1030  CHECK_GE(ref_idx, size_t(1));
1031  CHECK_LE(ref_idx, groupby_exprs.size());
1032  const auto groupby_expr = *std::next(groupby_exprs.begin(), ref_idx - 1);
1033  target_expr = var_ref(groupby_expr.get(), Analyzer::Var::kGROUPBY, ref_idx);
1034  } else {
1035  if (i >= starting_projection_column_idx &&
1036  i < get_scalar_sources_size(compound) - 1) {
1037  target_expr =
1038  cast_to_column_type(translator.translateScalarRex(target_rex_scalar),
1039  tableId,
1040  cat,
1041  colNames[i - starting_projection_column_idx]);
1042  } else {
1043  target_expr = translator.translateScalarRex(target_rex_scalar);
1044  }
1045  auto rewritten_expr = rewrite_expr(target_expr.get());
1046  target_expr = fold_expr(rewritten_expr.get());
1047  }
1048  }
1049  CHECK(target_expr);
1050  target_exprs_owned.push_back(target_expr);
1051  target_exprs.push_back(target_expr.get());
1052  }
1053  return target_exprs;
1054 }
const Rex * getTargetExpr(const size_t i) const
size_t getIndex() const
std::shared_ptr< Analyzer::Expr > translateScalarRex(const RexScalar *rex) const
size_t size() const override
size_t get_scalar_sources_size(const RelCompound *compound)
std::shared_ptr< Analyzer::Var > var_ref(const Analyzer::Expr *expr, const Analyzer::Var::WhichRow which_row, const int varno)
Definition: Analyzer.h:1585
#define CHECK_GE(x, y)
Definition: Logger.h:203
Analyzer::ExpressionPtr rewrite_expr(const Analyzer::Expr *expr)
CHECK(cgen_state)
std::shared_ptr< Analyzer::Expr > cast_to_column_type(std::shared_ptr< Analyzer::Expr > expr, int32_t tableId, const Catalog_Namespace::Catalog &cat, const std::string &colName)
#define CHECK_LE(x, y)
Definition: Logger.h:201
std::shared_ptr< Analyzer::Expr > fold_expr(const Analyzer::Expr *expr)
static std::shared_ptr< Analyzer::Expr > translateAggregateRex(const RexAgg *rex, const std::vector< std::shared_ptr< Analyzer::Expr >> &scalar_sources)

+ Here is the call graph for this function:

+ Here is the caller graph for this function: