OmniSciDB  1dac507f6e
anonymous_namespace{QueryMemoryDescriptor.cpp} Namespace Reference

Functions

bool is_int_and_no_bigger_than(const SQLTypeInfo& ti, const size_t byte_width)
std::vector<ssize_t> target_expr_group_by_indices(const std::list<std::shared_ptr<Analyzer::Expr>>& groupby_exprs, const std::vector<Analyzer::Expr*>& target_exprs)
std::vector<ssize_t> target_expr_proj_indices(const RelAlgExecutionUnit& ra_exe_unit, const Catalog_Namespace::Catalog& cat)
int8_t pick_baseline_key_component_width(const ExpressionRange& range, const size_t group_col_width)
int8_t pick_baseline_key_width(const RelAlgExecutionUnit& ra_exe_unit, const std::vector<InputTableInfo>& query_infos, const Executor* executor)
std::string boolToString(const bool val)
std::string queryDescTypeToString(const QueryDescriptionType val)

Function Documentation

std::string anonymous_namespace{QueryMemoryDescriptor.cpp}::boolToString(const bool val) [inline]

Definition at line 1111 of file QueryMemoryDescriptor.cpp.

Referenced by QueryMemoryDescriptor::reductionKey(), and QueryMemoryDescriptor::toString().

inline std::string boolToString(const bool val) {
  return val ? "True" : "False";
}
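The capitalized "True"/"False" strings appear verbatim in reductionKey() and toString() output. A minimal standalone check (hypothetical main(); the real function is file-local to QueryMemoryDescriptor.cpp):

#include <cassert>
#include <string>

std::string boolToString(const bool val) {
  return val ? "True" : "False";
}

int main() {
  assert(boolToString(true) == "True");  // capitalized, not "true"
  assert(boolToString(false) == "False");
}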


bool anonymous_namespace{QueryMemoryDescriptor.cpp}::is_int_and_no_bigger_than(const SQLTypeInfo& ti, const size_t byte_width)

Definition at line 31 of file QueryMemoryDescriptor.cpp.

References get_bit_width(), and SQLTypeInfoCore< TYPE_FACET_PACK >::is_integer().

Referenced by QueryMemoryDescriptor::pick_target_compact_width().

bool is_int_and_no_bigger_than(const SQLTypeInfo& ti, const size_t byte_width) {
  if (!ti.is_integer()) {
    return false;
  }
  return get_bit_width(ti) <= (byte_width * 8);
}
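In words: an integer of type ti fits in a byte_width-byte output slot iff its bit width is at most byte_width * 8; QueryMemoryDescriptor::pick_target_compact_width() uses this to decide whether slots can be narrowed. A minimal sketch of the same check, with a plain bit count standing in for get_bit_width(ti) (illustrative only):

#include <cassert>
#include <cstddef>

// Hypothetical stand-in: `type_bits` plays the role of get_bit_width(ti),
// and the is_integer() test is assumed to have already passed.
bool fits_in_slot(const size_t type_bits, const size_t slot_byte_width) {
  return type_bits <= slot_byte_width * 8;
}

int main() {
  assert(fits_in_slot(16, 4));   // SMALLINT fits a 4-byte slot
  assert(fits_in_slot(32, 4));   // INT fits exactly
  assert(!fits_in_slot(64, 4));  // BIGINT needs the full 8 bytes
}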


int8_t anonymous_namespace{QueryMemoryDescriptor.cpp}::pick_baseline_key_component_width(const ExpressionRange& range, const size_t group_col_width)

Definition at line 114 of file QueryMemoryDescriptor.cpp.

References Double, EMPTY_KEY_32, Float, ExpressionRange::getIntMax(), ExpressionRange::getType(), ExpressionRange::hasNulls(), Integer, Invalid, and UNREACHABLE.

Referenced by pick_baseline_key_width().

int8_t pick_baseline_key_component_width(const ExpressionRange& range,
                                         const size_t group_col_width) {
  if (range.getType() == ExpressionRangeType::Invalid) {
    return sizeof(int64_t);
  }
  switch (range.getType()) {
    case ExpressionRangeType::Integer:
      if (group_col_width == sizeof(int64_t) && range.hasNulls()) {
        return sizeof(int64_t);
      }
      return range.getIntMax() < EMPTY_KEY_32 - 1 ? sizeof(int32_t) : sizeof(int64_t);
    case ExpressionRangeType::Float:
    case ExpressionRangeType::Double:
      return sizeof(int64_t);  // No compaction for floating point yet.
    default:
      UNREACHABLE();
  }
  return sizeof(int64_t);
}
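The rule: a baseline hash key component compacts to 4 bytes only when the integer range provably stays clear of the 32-bit empty-slot sentinel; nullable 8-byte columns and floating-point keys always take 8 bytes, and an unknown (Invalid) range falls back to 8. A standalone sketch of the integer branch, assuming EMPTY_KEY_32 is the int32 max sentinel (illustrative only):

#include <cassert>
#include <cstdint>
#include <limits>

// kEmptyKey32 models EMPTY_KEY_32, assumed here to be the int32 sentinel
// that marks empty baseline hash-table slots.
constexpr int64_t kEmptyKey32 = std::numeric_limits<int32_t>::max();

size_t integer_component_width(const int64_t int_max, const bool has_nulls,
                               const size_t group_col_width) {
  if (group_col_width == sizeof(int64_t) && has_nulls) {
    return sizeof(int64_t);  // the 64-bit NULL sentinel does not fit in 4 bytes
  }
  return int_max < kEmptyKey32 - 1 ? sizeof(int32_t) : sizeof(int64_t);
}

int main() {
  assert(integer_component_width(1000, false, 8) == 4);         // small range: compact
  assert(integer_component_width(kEmptyKey32, false, 8) == 8);  // would hit the sentinel
  assert(integer_component_width(1000, true, 8) == 8);          // nullable 8-byte key
}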


int8_t anonymous_namespace{QueryMemoryDescriptor.cpp}::pick_baseline_key_width(const RelAlgExecutionUnit& ra_exe_unit, const std::vector<InputTableInfo>& query_infos, const Executor* executor)

Definition at line 135 of file QueryMemoryDescriptor.cpp.

References getExpressionRange(), RelAlgExecutionUnit::groupby_exprs, and pick_baseline_key_component_width().

int8_t pick_baseline_key_width(const RelAlgExecutionUnit& ra_exe_unit,
                               const std::vector<InputTableInfo>& query_infos,
                               const Executor* executor) {
  int8_t compact_width{4};
  for (const auto groupby_expr : ra_exe_unit.groupby_exprs) {
    const auto expr_range = getExpressionRange(groupby_expr.get(), query_infos, executor);
    compact_width = std::max(compact_width,
                             pick_baseline_key_component_width(
                                 expr_range, groupby_expr->get_type_info().get_size()));
  }
  return compact_width;
}
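Because every component of a composite baseline key shares one width, the function folds the per-column widths into their maximum, never dropping below 4 bytes. A condensed sketch of that fold (hypothetical helper; the widths would come from pick_baseline_key_component_width()):

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

int8_t pick_key_width(const std::vector<int8_t>& component_widths) {
  int8_t compact_width{4};  // never narrower than 4 bytes
  for (const auto w : component_widths) {
    compact_width = std::max(compact_width, w);
  }
  return compact_width;
}

int main() {
  assert(pick_key_width({4, 4}) == 4);  // all columns compact: 4-byte components
  assert(pick_key_width({4, 8}) == 8);  // one wide column widens the whole key
}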


std::string anonymous_namespace{QueryMemoryDescriptor.cpp}::queryDescTypeToString(const QueryDescriptionType val) [inline]

Definition at line 1115 of file QueryMemoryDescriptor.cpp.

References Estimator, GroupByBaselineHash, GroupByPerfectHash, NonGroupedAggregate, Projection, and UNREACHABLE.

Referenced by QueryMemoryDescriptor::reductionKey().

inline std::string queryDescTypeToString(const QueryDescriptionType val) {
  switch (val) {
    case QueryDescriptionType::GroupByPerfectHash:
      return "Perfect Hash";
    case QueryDescriptionType::GroupByBaselineHash:
      return "Baseline Hash";
    case QueryDescriptionType::Projection:
      return "Projection";
    case QueryDescriptionType::NonGroupedAggregate:
      return "Non-grouped Aggregate";
    case QueryDescriptionType::Estimator:
      return "Estimator";
    default:
      UNREACHABLE();
  }
  return "";
}
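A standalone copy with a stand-in enum, just to make the mapping testable (illustrative only; the real QueryDescriptionType is defined elsewhere in the codebase):

#include <cassert>
#include <string>

enum class QueryDescriptionType {
  GroupByPerfectHash,
  GroupByBaselineHash,
  Projection,
  NonGroupedAggregate,
  Estimator
};

std::string queryDescTypeToString(const QueryDescriptionType val) {
  switch (val) {
    case QueryDescriptionType::GroupByPerfectHash:
      return "Perfect Hash";
    case QueryDescriptionType::GroupByBaselineHash:
      return "Baseline Hash";
    case QueryDescriptionType::Projection:
      return "Projection";
    case QueryDescriptionType::NonGroupedAggregate:
      return "Non-grouped Aggregate";
    case QueryDescriptionType::Estimator:
      return "Estimator";
  }
  return "";
}

int main() {
  assert(queryDescTypeToString(QueryDescriptionType::GroupByBaselineHash) ==
         "Baseline Hash");
}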


std::vector<ssize_t> anonymous_namespace{QueryMemoryDescriptor.cpp}::target_expr_group_by_indices(const std::list<std::shared_ptr<Analyzer::Expr>>& groupby_exprs, const std::vector<Analyzer::Expr*>& target_exprs)

Definition at line 38 of file QueryMemoryDescriptor.cpp.

References Analyzer::Var::get_varno(), and Analyzer::Var::kGROUPBY.

std::vector<ssize_t> target_expr_group_by_indices(
    const std::list<std::shared_ptr<Analyzer::Expr>>& groupby_exprs,
    const std::vector<Analyzer::Expr*>& target_exprs) {
  std::vector<ssize_t> indices(target_exprs.size(), -1);
  for (size_t target_idx = 0; target_idx < target_exprs.size(); ++target_idx) {
    const auto target_expr = target_exprs[target_idx];
    if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
      continue;
    }
    const auto var_expr = dynamic_cast<const Analyzer::Var*>(target_expr);
    if (var_expr && var_expr->get_which_row() == Analyzer::Var::kGROUPBY) {
      indices[target_idx] = var_expr->get_varno() - 1;
      continue;
    }
  }
  return indices;
}
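Each target gets either the 0-based slot of the group-by key it mirrors (varno is 1-based) or -1 when it must be computed. A sketch with hypothetical minimal stand-ins for the Analyzer classes, mirroring the dynamic_cast dispatch (the kGROUPBY which-row check is elided; std::ptrdiff_t stands in for ssize_t):

#include <cassert>
#include <cstddef>
#include <vector>

struct Expr { virtual ~Expr() = default; };
struct AggExpr : Expr {};
struct Var : Expr {
  explicit Var(const int varno) : varno(varno) {}
  int get_varno() const { return varno; }
  int varno;
};

std::vector<std::ptrdiff_t> group_by_indices(const std::vector<const Expr*>& targets) {
  std::vector<std::ptrdiff_t> indices(targets.size(), -1);
  for (size_t i = 0; i < targets.size(); ++i) {
    if (dynamic_cast<const AggExpr*>(targets[i])) {
      continue;  // aggregates are computed, never read from the key
    }
    if (const auto var = dynamic_cast<const Var*>(targets[i])) {
      indices[i] = var->get_varno() - 1;  // varno is 1-based
    }
  }
  return indices;
}

int main() {
  // SELECT x, COUNT(*) FROM t GROUP BY x;  -- x is group-by key #1
  Var x{1};
  AggExpr count;
  const auto indices = group_by_indices({&x, &count});
  assert(indices[0] == 0);   // read x straight from key slot 0
  assert(indices[1] == -1);  // COUNT(*) has no group-by slot
}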


std::vector<ssize_t> anonymous_namespace{QueryMemoryDescriptor.cpp}::target_expr_proj_indices(const RelAlgExecutionUnit& ra_exe_unit, const Catalog_Namespace::Catalog& cat)

Definition at line 56 of file QueryMemoryDescriptor.cpp.

References CHECK(), get_column_descriptor_maybe(), RelAlgExecutionUnit::input_descs, is_real_str_or_array(), kENCODING_NONE, SortInfo::order_entries, RelAlgExecutionUnit::quals, RelAlgExecutionUnit::simple_quals, RelAlgExecutionUnit::sort_info, RelAlgExecutionUnit::target_exprs, and ScalarExprVisitor< T >::visit().

std::vector<ssize_t> target_expr_proj_indices(const RelAlgExecutionUnit& ra_exe_unit,
                                              const Catalog_Namespace::Catalog& cat) {
  if (ra_exe_unit.input_descs.size() > 1 ||
      !ra_exe_unit.sort_info.order_entries.empty()) {
    return {};
  }
  std::vector<ssize_t> target_indices(ra_exe_unit.target_exprs.size(), -1);
  UsedColumnsVisitor columns_visitor;
  std::unordered_set<int> used_columns;
  for (const auto& simple_qual : ra_exe_unit.simple_quals) {
    const auto crt_used_columns = columns_visitor.visit(simple_qual.get());
    used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
  }
  for (const auto& qual : ra_exe_unit.quals) {
    const auto crt_used_columns = columns_visitor.visit(qual.get());
    used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
  }
  for (const auto& target : ra_exe_unit.target_exprs) {
    const auto col_var = dynamic_cast<const Analyzer::ColumnVar*>(target);
    if (col_var) {
      const auto cd = get_column_descriptor_maybe(
          col_var->get_column_id(), col_var->get_table_id(), cat);
      if (!cd || !cd->isVirtualCol) {
        continue;
      }
    }
    const auto crt_used_columns = columns_visitor.visit(target);
    used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
  }
  for (size_t target_idx = 0; target_idx < ra_exe_unit.target_exprs.size();
       ++target_idx) {
    const auto target_expr = ra_exe_unit.target_exprs[target_idx];
    CHECK(target_expr);
    const auto& ti = target_expr->get_type_info();
    const bool is_real_str_or_array =
        (ti.is_string() && ti.get_compression() == kENCODING_NONE) || ti.is_array();
    if (is_real_str_or_array) {
      continue;
    }
    if (ti.is_geometry()) {
      // TODO(adb): Ideally we could determine which physical columns are required for a
      // given query and fetch only those. For now, we bail on the memory optimization,
      // since it is possible that adding the physical columns could have unintended
      // consequences further down the execution path.
      return {};
    }
    const auto col_var = dynamic_cast<const Analyzer::ColumnVar*>(target_expr);
    if (!col_var) {
      continue;
    }
    if (!is_real_str_or_array &&
        used_columns.find(col_var->get_column_id()) == used_columns.end()) {
      target_indices[target_idx] = 0;
    }
  }
  return target_indices;
}
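The returned vector in effect marks, per projected target, whether it needs eager materialization in the projection buffer: an entry stays -1 for anything a WHERE qual references and for none-encoded strings and arrays, and the whole optimization is abandoned for multi-table inputs, ORDER BY, or geometry targets. A condensed sketch of the per-column decision (hypothetical helper; the real function also consults the catalog for virtual columns):

#include <cassert>
#include <unordered_set>

// A plain column can be left unmaterialized only if no filter qual touches
// it and its payload is fixed-width.
bool skip_materialization(const int col_id,
                          const std::unordered_set<int>& filter_columns,
                          const bool is_none_encoded_str_or_array) {
  if (is_none_encoded_str_or_array) {
    return false;  // variable-length payloads are fetched eagerly
  }
  return filter_columns.count(col_id) == 0;
}

int main() {
  // SELECT a, b FROM t WHERE a > 0;  -- only `a` feeds the filter
  const std::unordered_set<int> filter_columns{1};          // column id of `a`
  assert(!skip_materialization(1, filter_columns, false));  // a: needed by the qual
  assert(skip_materialization(2, filter_columns, false));   // b: can be deferred
}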
