OmniSciDB  04ee39c94c
anonymous_namespace{QueryMemoryDescriptor.cpp} Namespace Reference

Functions

bool is_int_and_no_bigger_than (const SQLTypeInfo &ti, const size_t byte_width)
 
std::vector< ssize_t > target_expr_group_by_indices (const std::list< std::shared_ptr< Analyzer::Expr >> &groupby_exprs, const std::vector< Analyzer::Expr *> &target_exprs)
 
std::vector< ssize_t > target_expr_proj_indices (const RelAlgExecutionUnit &ra_exe_unit, const Catalog_Namespace::Catalog &cat)
 
int8_t pick_baseline_key_component_width (const ExpressionRange &range)
 
int8_t pick_baseline_key_width (const RelAlgExecutionUnit &ra_exe_unit, const std::vector< InputTableInfo > &query_infos, const Executor *executor)
 
std::string boolToString (const bool val)
 
std::string queryDescTypeToString (const QueryDescriptionType val)
 

Function Documentation

◆ boolToString()

std::string anonymous_namespace{QueryMemoryDescriptor.cpp}::boolToString ( const bool  val)
inline

Definition at line 1100 of file QueryMemoryDescriptor.cpp.

Referenced by QueryMemoryDescriptor::toString().

// Render a bool as the capitalized literal used by
// QueryMemoryDescriptor::toString().
inline std::string boolToString(const bool val) {
  if (val) {
    return "True";
  }
  return "False";
}
+ Here is the caller graph for this function:

◆ is_int_and_no_bigger_than()

bool anonymous_namespace{QueryMemoryDescriptor.cpp}::is_int_and_no_bigger_than ( const SQLTypeInfo ti,
const size_t  byte_width 
)

Definition at line 31 of file QueryMemoryDescriptor.cpp.

References get_bit_width(), and SQLTypeInfoCore< TYPE_FACET_PACK >::is_integer().

Referenced by QueryMemoryDescriptor::pick_target_compact_width().

31  {
32  if (!ti.is_integer()) {
33  return false;
34  }
35  return get_bit_width(ti) <= (byte_width * 8);
36 }
bool is_integer() const
Definition: sqltypes.h:452
size_t get_bit_width(const SQLTypeInfo &ti)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ pick_baseline_key_component_width()

int8_t anonymous_namespace{QueryMemoryDescriptor.cpp}::pick_baseline_key_component_width ( const ExpressionRange range)

Definition at line 114 of file QueryMemoryDescriptor.cpp.

References Double, EMPTY_KEY_32, Float, ExpressionRange::getIntMax(), ExpressionRange::getType(), Integer, Invalid, and UNREACHABLE.

Referenced by pick_baseline_key_width().

114  {
115  if (range.getType() == ExpressionRangeType::Invalid) {
116  return sizeof(int64_t);
117  }
118  switch (range.getType()) {
120  return range.getIntMax() < EMPTY_KEY_32 - 1 ? sizeof(int32_t) : sizeof(int64_t);
123  return sizeof(int64_t); // No compaction for floating point yet.
124  default:
125  UNREACHABLE();
126  }
127  return sizeof(int64_t);
128 }
#define UNREACHABLE()
Definition: Logger.h:231
int64_t getIntMax() const
ExpressionRangeType getType() const
#define EMPTY_KEY_32
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ pick_baseline_key_width()

int8_t anonymous_namespace{QueryMemoryDescriptor.cpp}::pick_baseline_key_width ( const RelAlgExecutionUnit ra_exe_unit,
const std::vector< InputTableInfo > &  query_infos,
const Executor executor 
)

Definition at line 131 of file QueryMemoryDescriptor.cpp.

References getExpressionRange(), RelAlgExecutionUnit::groupby_exprs, and pick_baseline_key_component_width().

Referenced by QueryMemoryDescriptor::init().

133  {
134  int8_t compact_width{4};
135  for (const auto groupby_expr : ra_exe_unit.groupby_exprs) {
136  const auto expr_range = getExpressionRange(groupby_expr.get(), query_infos, executor);
137  compact_width =
138  std::max(compact_width, pick_baseline_key_component_width(expr_range));
139  }
140  return compact_width;
141 }
const std::list< std::shared_ptr< Analyzer::Expr > > groupby_exprs
ExpressionRange getExpressionRange(const Analyzer::BinOper *expr, const std::vector< InputTableInfo > &query_infos, const Executor *, boost::optional< std::list< std::shared_ptr< Analyzer::Expr >>> simple_quals)
int8_t pick_baseline_key_component_width(const ExpressionRange &range)
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ queryDescTypeToString()

std::string anonymous_namespace{QueryMemoryDescriptor.cpp}::queryDescTypeToString ( const QueryDescriptionType  val)
inline

Definition at line 1104 of file QueryMemoryDescriptor.cpp.

References Estimator, GroupByBaselineHash, GroupByPerfectHash, NonGroupedAggregate, Projection, and UNREACHABLE.

Referenced by QueryMemoryDescriptor::toString().

1104  {
1105  switch (val) {
1107  return "Perfect Hash";
1109  return "Baseline Hash";
1111  return "Projection";
1113  return "Non-grouped Aggregate";
1115  return "Estimator";
1116  default:
1117  UNREACHABLE();
1118  }
1119  return "";
1120 }
#define UNREACHABLE()
Definition: Logger.h:231
+ Here is the caller graph for this function:

◆ target_expr_group_by_indices()

std::vector<ssize_t> anonymous_namespace{QueryMemoryDescriptor.cpp}::target_expr_group_by_indices ( const std::list< std::shared_ptr< Analyzer::Expr >> &  groupby_exprs,
const std::vector< Analyzer::Expr *> &  target_exprs 
)

Definition at line 38 of file QueryMemoryDescriptor.cpp.

References Analyzer::Var::get_varno(), and Analyzer::Var::kGROUPBY.

Referenced by QueryMemoryDescriptor::init().

40  {
41  std::vector<ssize_t> indices(target_exprs.size(), -1);
42  for (size_t target_idx = 0; target_idx < target_exprs.size(); ++target_idx) {
43  const auto target_expr = target_exprs[target_idx];
44  if (dynamic_cast<const Analyzer::AggExpr*>(target_expr)) {
45  continue;
46  }
47  const auto var_expr = dynamic_cast<const Analyzer::Var*>(target_expr);
48  if (var_expr && var_expr->get_which_row() == Analyzer::Var::kGROUPBY) {
49  indices[target_idx] = var_expr->get_varno() - 1;
50  continue;
51  }
52  }
53  return indices;
54 }
int get_varno() const
Definition: Analyzer.h:274
+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ target_expr_proj_indices()

std::vector<ssize_t> anonymous_namespace{QueryMemoryDescriptor.cpp}::target_expr_proj_indices ( const RelAlgExecutionUnit ra_exe_unit,
const Catalog_Namespace::Catalog cat 
)

Definition at line 56 of file QueryMemoryDescriptor.cpp.

References CHECK, get_column_descriptor_maybe(), RelAlgExecutionUnit::input_descs, is_real_str_or_array(), kENCODING_NONE, SortInfo::order_entries, RelAlgExecutionUnit::quals, RelAlgExecutionUnit::simple_quals, RelAlgExecutionUnit::sort_info, RelAlgExecutionUnit::target_exprs, and ScalarExprVisitor< T >::visit().

Referenced by QueryMemoryDescriptor::init().

57  {
58  if (ra_exe_unit.input_descs.size() > 1 ||
59  !ra_exe_unit.sort_info.order_entries.empty()) {
60  return {};
61  }
62  std::vector<ssize_t> target_indices(ra_exe_unit.target_exprs.size(), -1);
63  UsedColumnsVisitor columns_visitor;
64  std::unordered_set<int> used_columns;
65  for (const auto& simple_qual : ra_exe_unit.simple_quals) {
66  const auto crt_used_columns = columns_visitor.visit(simple_qual.get());
67  used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
68  }
69  for (const auto& qual : ra_exe_unit.quals) {
70  const auto crt_used_columns = columns_visitor.visit(qual.get());
71  used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
72  }
73  for (const auto& target : ra_exe_unit.target_exprs) {
74  const auto col_var = dynamic_cast<const Analyzer::ColumnVar*>(target);
75  if (col_var) {
76  const auto cd = get_column_descriptor_maybe(
77  col_var->get_column_id(), col_var->get_table_id(), cat);
78  if (!cd || !cd->isVirtualCol) {
79  continue;
80  }
81  }
82  const auto crt_used_columns = columns_visitor.visit(target);
83  used_columns.insert(crt_used_columns.begin(), crt_used_columns.end());
84  }
85  for (size_t target_idx = 0; target_idx < ra_exe_unit.target_exprs.size();
86  ++target_idx) {
87  const auto target_expr = ra_exe_unit.target_exprs[target_idx];
88  CHECK(target_expr);
89  const auto& ti = target_expr->get_type_info();
90  const bool is_real_str_or_array =
91  (ti.is_string() && ti.get_compression() == kENCODING_NONE) || ti.is_array();
92  if (is_real_str_or_array) {
93  continue;
94  }
95  if (ti.is_geometry()) {
96  // TODO(adb): Ideally we could determine which physical columns are required for a
97  // given query and fetch only those. For now, we bail on the memory optimization,
98  // since it is possible that adding the physical columns could have unintended
99  // consequences further down the execution path.
100  return {};
101  }
102  const auto col_var = dynamic_cast<const Analyzer::ColumnVar*>(target_expr);
103  if (!col_var) {
104  continue;
105  }
106  if (!is_real_str_or_array &&
107  used_columns.find(col_var->get_column_id()) == used_columns.end()) {
108  target_indices[target_idx] = 0;
109  }
110  }
111  return target_indices;
112 }
std::vector< Analyzer::Expr * > target_exprs
const std::list< Analyzer::OrderEntry > order_entries
const std::vector< InputDescriptor > input_descs
const ColumnDescriptor * get_column_descriptor_maybe(const int col_id, const int table_id, const Catalog_Namespace::Catalog &cat)
Definition: Execute.h:168
const SortInfo sort_info
bool is_real_str_or_array(const TargetInfo &target_info)
T visit(const Analyzer::Expr *expr) const
std::list< std::shared_ptr< Analyzer::Expr > > quals
#define CHECK(condition)
Definition: Logger.h:187
std::list< std::shared_ptr< Analyzer::Expr > > simple_quals
+ Here is the call graph for this function:
+ Here is the caller graph for this function: