OmniSciDB  cde582ebc3
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
FromTableReordering.cpp
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "FromTableReordering.h"
18 #include "../Analyzer/Analyzer.h"
19 #include "Execute.h"
20 #include "RangeTableIndexVisitor.h"
21 
22 #include <numeric>
23 #include <queue>
24 #include <regex>
25 
26 namespace {
27 
28 using cost_t = unsigned;
29 using node_t = size_t;
30 
// Relative cost of each geo type when costing ST_* join qualifiers: simpler
// geometries (points) are cheaper than more complex ones (multipolygons).
// Consumed by get_join_qual_cost(); per its contract, every cost must be
// strictly greater than 0.
static std::unordered_map<SQLTypes, cost_t> GEO_TYPE_COSTS{{kPOINT, 60},
                                                           {kARRAY, 60},
                                                           {kLINESTRING, 70},
                                                           {kPOLYGON, 80},
                                                           {kMULTIPOLYGON, 90}};
36 
37 // Returns a lhs/rhs cost for the given qualifier. Must be strictly greater than 0.
38 std::tuple<cost_t, cost_t, InnerQualDecision> get_join_qual_cost(
39  const Analyzer::Expr* qual,
40  const Executor* executor) {
41  const auto func_oper = dynamic_cast<const Analyzer::FunctionOper*>(qual);
42  if (func_oper) {
43  std::vector<SQLTypes> geo_types_for_func;
44  for (size_t i = 0; i < func_oper->getArity(); i++) {
45  const auto arg_expr = func_oper->getArg(i);
46  const auto& ti = arg_expr->get_type_info();
47  if (ti.is_geometry() || is_constructed_point(arg_expr)) {
48  geo_types_for_func.push_back(ti.get_type());
49  }
50  }
51  std::regex geo_func_regex("ST_[\\w]*");
52  std::smatch geo_func_match;
53  const auto& func_name = func_oper->getName();
54  if (geo_types_for_func.size() == 2 &&
55  std::regex_match(func_name, geo_func_match, geo_func_regex)) {
56  const auto rhs_cost = GEO_TYPE_COSTS[geo_types_for_func[0]];
57  const auto lhs_cost = GEO_TYPE_COSTS[geo_types_for_func[1]];
58  return {lhs_cost, rhs_cost, InnerQualDecision::IGNORE};
59  }
60  return {200, 200, InnerQualDecision::IGNORE};
61  }
62  const auto bin_oper = dynamic_cast<const Analyzer::BinOper*>(qual);
63  if (!bin_oper || !IS_EQUIVALENCE(bin_oper->get_optype())) {
64  return {200, 200, InnerQualDecision::IGNORE};
65  }
66  InnerQualDecision inner_qual_decision = InnerQualDecision::UNKNOWN;
67  if (executor) {
68  try {
69  const auto normalized_bin_oper = HashJoin::normalizeColumnPairs(
70  bin_oper, *executor->getCatalog(), executor->getTemporaryTables());
71  const auto& inner_outer = normalized_bin_oper.first;
72  // normalization success, so we need to figure out which cv becomes an inner
73  auto lhs = bin_oper->get_left_operand();
74  if (auto lhs_tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(
75  bin_oper->get_left_operand())) {
76  lhs = lhs_tuple->getTuple().front().get();
77  }
78  CHECK(lhs);
79  if (lhs == inner_outer.front().first) {
80  inner_qual_decision = InnerQualDecision::LHS;
81  } else if (lhs == inner_outer.front().second) {
82  inner_qual_decision = InnerQualDecision::RHS;
83  }
84  } catch (const HashJoinFail& e) {
85  return {200, 200, e.inner_qual_decision};
86  } catch (...) {
87  return {200, 200, inner_qual_decision};
88  }
89  }
90  return {100, 100, inner_qual_decision};
91 }
92 
93 // Builds a graph with nesting levels as nodes and join condition costs as edges.
// Builds a graph with nesting levels as nodes and join condition costs as edges.
// Returns an adjacency map per nest level. Also fills `qual_detection_res`
// with the InnerQualDecision recorded for each (lhs, rhs) nest-level pair.
std::vector<std::map<node_t, cost_t>> build_join_cost_graph(
    const JoinQualsPerNestingLevel& left_deep_join_quals,
    const std::vector<InputTableInfo>& table_infos,
    const Executor* executor,
    std::vector<std::map<node_t, InnerQualDecision>>& qual_detection_res) {
  // One nest level per input table.
  CHECK_EQ(left_deep_join_quals.size() + 1, table_infos.size());
  std::vector<std::map<node_t, cost_t>> join_cost_graph(table_infos.size());
  // Build the constraints graph: nodes are nest levels, edges are the existence of
  // qualifiers between levels.
  for (const auto& current_level_join_conditions : left_deep_join_quals) {
    for (const auto& qual : current_level_join_conditions.quals) {
      // `visitor` collects the nest levels (rte indices) referenced by the
      // qual; only quals spanning exactly two levels contribute a costed edge.
      std::set<int> qual_nest_levels = visitor.visit(qual.get());
      if (qual_nest_levels.size() != 2) {
        continue;
      }
      // std::set iterates in ascending order, so lhs_nest_level < rhs_nest_level.
      int lhs_nest_level = *qual_nest_levels.begin();
      CHECK_GE(lhs_nest_level, 0);
      qual_nest_levels.erase(qual_nest_levels.begin());
      int rhs_nest_level = *qual_nest_levels.begin();
      CHECK_GE(rhs_nest_level, 0);

      // Get the {lhs, rhs} cost for the qual
      const auto qual_costing = get_join_qual_cost(qual.get(), executor);
      qual_detection_res[lhs_nest_level][rhs_nest_level] = std::get<2>(qual_costing);
      qual_detection_res[rhs_nest_level][lhs_nest_level] = std::get<2>(qual_costing);
      // Keep only the cheapest qual per level pair: the lhs->rhs edge carries
      // the rhs cost and the rhs->lhs edge carries the lhs cost.
      const auto edge_it = join_cost_graph[lhs_nest_level].find(rhs_nest_level);
      auto rhs_cost = std::get<1>(qual_costing);
      if (edge_it == join_cost_graph[lhs_nest_level].end() ||
          edge_it->second > rhs_cost) {
        auto lhs_cost = std::get<0>(qual_costing);
        join_cost_graph[lhs_nest_level][rhs_nest_level] = rhs_cost;
        join_cost_graph[rhs_nest_level][lhs_nest_level] = lhs_cost;
      }
    }
  }
  return join_cost_graph;
}
132 
133 // Tracks dependencies between nodes.
135  public:
136  SchedulingDependencyTracking(const size_t node_count) : inbound_(node_count) {}
137 
  // Add a from -> to dependency: `to` gains `from` as an inbound edge and is
  // not reported as a root until removeNode(from) is called.
  void addEdge(const node_t from, const node_t to) { inbound_[to].insert(from); }
140 
141  // Removes from's outbound dependencies.
142  void removeNode(const node_t from) {
143  for (auto& inbound_for_node : inbound_) {
144  inbound_for_node.erase(from);
145  }
146  }
147 
148  // Returns the set of all nodes without dependencies.
149  std::unordered_set<node_t> getRoots() const {
150  std::unordered_set<node_t> roots;
151  for (node_t candidate = 0; candidate < inbound_.size(); ++candidate) {
152  if (inbound_[candidate].empty()) {
153  roots.insert(candidate);
154  }
155  }
156  return roots;
157  }
158 
159  private:
160  std::vector<std::unordered_set<node_t>> inbound_;
161 };
162 
163 // The tree edge for traversal of the cost graph.
167 };
168 
169 // Builds dependency tracking based on left joins
171  const JoinQualsPerNestingLevel& left_deep_join_quals,
172  const std::vector<std::map<node_t, cost_t>>& join_cost_graph) {
173  SchedulingDependencyTracking dependency_tracking(left_deep_join_quals.size() + 1);
174  // Add directed graph edges for left join dependencies.
175  // See also start_it inside traverse_join_cost_graph(). These
176  // edges prevent start_it from pointing to a table with a
177  // left join dependency on another table.
178  for (size_t level_idx = 0; level_idx < left_deep_join_quals.size(); ++level_idx) {
179  if (left_deep_join_quals[level_idx].type == JoinType::LEFT) {
180  dependency_tracking.addEdge(level_idx, level_idx + 1);
181  }
182  }
183  return dependency_tracking;
184 }
185 
// Do a breadth-first traversal of the cost graph. This avoids scheduling a nest level
// before the ones which constraint it are scheduled and it favors equi joins over loop
// joins.
// `compare_node` orders nest levels by table cardinality; `compare_edge`
// orders traversal edges (cost first, cardinality as tie-breaker);
// `qual_normalization_res` holds per-level-pair inner-qual decisions produced
// by build_join_cost_graph(). Returns the scheduled nest-level permutation.
std::vector<node_t> traverse_join_cost_graph(
    const std::vector<std::map<node_t, cost_t>>& join_cost_graph,
    const std::vector<InputTableInfo>& table_infos,
    const std::function<bool(const node_t lhs_nest_level, const node_t rhs_nest_level)>&
        compare_node,
    const std::function<bool(const TraversalEdge&, const TraversalEdge&)>& compare_edge,
    const JoinQualsPerNestingLevel& left_deep_join_quals,
    std::vector<std::map<node_t, InnerQualDecision>>& qual_normalization_res) {
  std::vector<node_t> all_nest_levels(table_infos.size());
  std::iota(all_nest_levels.begin(), all_nest_levels.end(), 0);
  std::vector<node_t> input_permutation;
  std::unordered_set<node_t> visited;
  auto dependency_tracking =
      build_dependency_tracking(left_deep_join_quals, join_cost_graph);
  // A node is schedulable when it has no pending (left-join) dependency and
  // has not been scheduled yet.
  auto schedulable_node = [&dependency_tracking, &visited](const node_t node) {
    const auto nodes_ready = dependency_tracking.getRoots();
    return nodes_ready.find(node) != nodes_ready.end() &&
           visited.find(node) == visited.end();
  };
  // Each outer iteration schedules one connected group of nest levels.
  while (visited.size() < table_infos.size()) {
    // Filter out nest levels which are already visited or have pending dependencies.
    std::vector<node_t> remaining_nest_levels;
    std::copy_if(all_nest_levels.begin(),
                 all_nest_levels.end(),
                 std::back_inserter(remaining_nest_levels),
                 schedulable_node);
    CHECK(!remaining_nest_levels.empty());
    // Start with the table with most tuples.
    const auto start_it = std::max_element(
        remaining_nest_levels.begin(), remaining_nest_levels.end(), compare_node);
    CHECK(start_it != remaining_nest_levels.end());
    std::priority_queue<TraversalEdge, std::vector<TraversalEdge>, decltype(compare_edge)>
        worklist(compare_edge);
    // look at all edges, compare the
    // cost of our edge vs theirs, and pick the best start edge
    node_t start = *start_it;
    // we adaptively switch the inner and outer when we have a chance to exploit
    // hash join framework for a query with a single binary join
    TraversalEdge start_edge{start, 0};

    // when we have a single binary join in the query, we can analyze the qual and apply
    // more smart table reordering logic that maximizes the chance of exploiting hash join
    // todo (yoonmin) : generalize this for an arbitrary join pipeline
    if (remaining_nest_levels.size() == 2 && qual_normalization_res[start].size() == 1) {
      auto inner_qual_decision = qual_normalization_res[start].begin()->second;
      auto join_qual = left_deep_join_quals.begin()->quals;
      using ColvarSet =
          std::set<const Analyzer::ColumnVar*,
                   bool (*)(const Analyzer::ColumnVar*, const Analyzer::ColumnVar*)>;

      // Rewrites the rte index of every column var in the set; used when the
      // inner/outer tables are swapped below.
      auto set_new_rte_idx = [](ColvarSet& cv_set, int new_rte) {
        std::for_each(
            cv_set.begin(), cv_set.end(), [new_rte](const Analyzer::ColumnVar* cv) {
              const_cast<Analyzer::ColumnVar*>(cv)->set_rte_idx(new_rte);
            });
      };

      // IGNORE: use the existing table reordering logic
      // KEEP: return the existing table permutation and related cvs (column variables)
      // SWAP: change the starting table of the table reordering logic and relevant
      // columns' rte index
      enum class Decision { IGNORE, KEEP, SWAP };

      // Decides KEEP/SWAP/IGNORE from: which side is the inner, whether the
      // outer side is a usable hash-join column, and the two tables' relative
      // cardinalities. Returns {decision, nest level to start from}.
      auto analyze_join_qual = [&start,
                                &remaining_nest_levels,
                                &inner_qual_decision,
                                &table_infos,
                                compare_node](const std::shared_ptr<Analyzer::Expr>& lhs,
                                              ColvarSet& lhs_colvar_set,
                                              const std::shared_ptr<Analyzer::Expr>& rhs,
                                              ColvarSet& rhs_colvar_set) {
        if (!lhs || !rhs || lhs_colvar_set.empty() || rhs_colvar_set.empty()) {
          return std::make_pair(Decision::IGNORE, start);
        }

        // The only other remaining nest level (we are in the two-table case).
        auto alternative_it = std::find_if(
            remaining_nest_levels.begin(),
            remaining_nest_levels.end(),
            [start](const size_t nest_level) { return start != nest_level; });
        CHECK(alternative_it != remaining_nest_levels.end());
        auto alternative_rte = *alternative_it;

        Decision decision = Decision::IGNORE;
        // inner col's rte should be larger than outer col
        int inner_rte = -1;
        int outer_rte = -1;
        bool is_outer_col_valid = false;
        // The outer side is usable for hash-join building only if every
        // expression on that side resolves to a column var.
        auto check_expr_is_valid_col = [&is_outer_col_valid](const Analyzer::Expr* expr) {
          if (auto expr_tuple = dynamic_cast<const Analyzer::ExpressionTuple*>(expr)) {
            for (auto& inner_expr : expr_tuple->getTuple()) {
              auto cv_from_expr =
                  HashJoin::getHashJoinColumn<Analyzer::ColumnVar>(inner_expr.get());
              if (!cv_from_expr) {
                is_outer_col_valid = false;
                return;
              }
            }
          } else {
            auto cv_from_expr = HashJoin::getHashJoinColumn<Analyzer::ColumnVar>(expr);
            if (!cv_from_expr) {
              is_outer_col_valid = false;
              return;
            }
          }
          is_outer_col_valid = true;
        };
        if (inner_qual_decision == InnerQualDecision::LHS) {
          inner_rte = (*lhs_colvar_set.begin())->get_rte_idx();
          outer_rte = (*rhs_colvar_set.begin())->get_rte_idx();
          check_expr_is_valid_col(rhs.get());
        } else if (inner_qual_decision == InnerQualDecision::RHS) {
          inner_rte = (*rhs_colvar_set.begin())->get_rte_idx();
          outer_rte = (*lhs_colvar_set.begin())->get_rte_idx();
          check_expr_is_valid_col(lhs.get());
        }
        if (inner_rte >= 0 && outer_rte >= 0) {
          const auto inner_cardinality =
              table_infos[inner_rte].info.getNumTuplesUpperBound();
          const auto outer_cardinality =
              table_infos[outer_rte].info.getNumTuplesUpperBound();
          // Only consider swapping when the inner is too large for a trivial
          // loop join.
          if (inner_cardinality > g_trivial_loop_join_threshold) {
            if (inner_rte == static_cast<int>(start)) {
              // inner is driving the join loop but also has a valid join column
              // which is available for building a hash table
              // but ignore swapping when inner's cardinality is larger than that of
              // outer's / otherwise swap inner and outer (to use the valid inner)
              decision = is_outer_col_valid && inner_cardinality > outer_cardinality
                             ? Decision::IGNORE
                             : Decision::SWAP;
            } else {
              CHECK_EQ(inner_rte, static_cast<int>(alternative_rte));
              // now, a valid inner column is outer table
              if (compare_node(inner_rte, start)) {
                // but outer table is larger than the current inner
                // so we can exploit the existing table reordering logic
                decision = Decision::IGNORE;
              } else {
                // and outer table is smaller than the current inner
                // so we do not need to reorder the table starting from the inner
                decision = Decision::KEEP;
              }
            }
          }
        }

        if (decision == Decision::KEEP) {
          return std::make_pair(decision, start);
        } else if (decision == Decision::SWAP) {
          return std::make_pair(decision, alternative_rte);
        }
        return std::make_pair(Decision::IGNORE, start);
      };

      // Gathers all column vars referenced by the expression into cv_set.
      auto collect_colvars = [](const std::shared_ptr<Analyzer::Expr> expr,
                                ColvarSet& cv_set) {
        expr->collect_column_var(cv_set, false);
      };

      // Applies a SWAP decision: restart the traversal from the alternative
      // nest level and patch the rte indices on both sides accordingly.
      auto adjust_reordering_logic = [&start, &start_edge, &start_it, set_new_rte_idx](
                                         Decision decision,
                                         int alternative_rte,
                                         ColvarSet& lhs_colvar_set,
                                         ColvarSet& rhs_colvar_set) {
        CHECK(decision == Decision::SWAP);
        start = alternative_rte;
        set_new_rte_idx(lhs_colvar_set, alternative_rte);
        set_new_rte_idx(rhs_colvar_set, *start_it);
        start_edge.join_cost = 0;
        start_edge.nest_level = start;
      };

      auto bin_op = dynamic_cast<Analyzer::BinOper*>(join_qual.begin()->get());
      if (bin_op) {
        auto lhs = bin_op->get_own_left_operand();
        auto rhs = bin_op->get_own_right_operand();
        if (auto lhs_exp = dynamic_cast<Analyzer::ExpressionTuple*>(lhs.get())) {
          // retrieve the decision and info for adjusting reordering by referring the
          // first cv and apply them to the rest of cvs
          auto rhs_exp = dynamic_cast<Analyzer::ExpressionTuple*>(rhs.get());
          CHECK(rhs_exp);
          auto& lhs_exprs = lhs_exp->getTuple();
          auto& rhs_exprs = rhs_exp->getTuple();
          CHECK_EQ(lhs_exprs.size(), rhs_exprs.size());
          for (size_t i = 0; i < lhs_exprs.size(); ++i) {
            // NOTE(review): `decision` and `alternative_rte_idx` are reset on
            // every iteration but only assigned when i == 0, so only the
            // first expression pair can trigger the SWAP below — confirm the
            // remaining cvs are intentionally left unadjusted, since the
            // comment above says the decision should be applied to all of them.
            Decision decision{Decision::IGNORE};
            int alternative_rte_idx = -1;
            ColvarSet lhs_colvar_set(Analyzer::ColumnVar::colvar_comp);
            ColvarSet rhs_colvar_set(Analyzer::ColumnVar::colvar_comp);
            collect_colvars(lhs_exprs.at(i), lhs_colvar_set);
            collect_colvars(rhs_exprs.at(i), rhs_colvar_set);
            if (i == 0) {
              auto investigation_res =
                  analyze_join_qual(lhs, lhs_colvar_set, rhs, rhs_colvar_set);
              decision = investigation_res.first;
              if (decision == Decision::KEEP) {
                // Current permutation is already the best choice; return it.
                return remaining_nest_levels;
              }
              alternative_rte_idx = investigation_res.second;
            }
            if (decision == Decision::SWAP) {
              adjust_reordering_logic(
                  decision, alternative_rte_idx, lhs_colvar_set, rhs_colvar_set);
            }
          }
        } else {
          // Single-column join qual.
          ColvarSet lhs_colvar_set(Analyzer::ColumnVar::colvar_comp);
          ColvarSet rhs_colvar_set(Analyzer::ColumnVar::colvar_comp);
          collect_colvars(lhs, lhs_colvar_set);
          collect_colvars(rhs, rhs_colvar_set);
          auto investigation_res =
              analyze_join_qual(lhs, lhs_colvar_set, rhs, rhs_colvar_set);
          if (investigation_res.first == Decision::KEEP) {
            return remaining_nest_levels;
          } else if (investigation_res.first == Decision::SWAP) {
            adjust_reordering_logic(investigation_res.first,
                                    investigation_res.second,
                                    lhs_colvar_set,
                                    rhs_colvar_set);
          }
        }
      }
    }

    VLOG(2) << "Table reordering starting with nest level " << start;
    // Refine the start choice: for each schedulable neighbor of the chosen
    // start, compare the edge back to start against the neighbor's edge and
    // move the start to the neighbor when compare_edge prefers it.
    for (const auto& graph_edge : join_cost_graph[*start_it]) {
      const node_t succ = graph_edge.first;
      if (!schedulable_node(succ)) {
        continue;
      }
      const TraversalEdge succ_edge{succ, graph_edge.second};
      for (const auto& successor_edge : join_cost_graph[succ]) {
        if (successor_edge.first == start) {
          start_edge.join_cost = successor_edge.second;
          // lhs cost / num tuples less than rhs cost if compare edge is true, swap nest
          // levels
          if (compare_edge(start_edge, succ_edge)) {
            VLOG(2) << "Table reordering changing start nest level from " << start
                    << " to " << succ;
            start = succ;
            start_edge = succ_edge;
          }
        }
      }
    }
    VLOG(2) << "Table reordering picked start nest level " << start << " with cost "
            << start_edge.join_cost;
    CHECK_EQ(start, start_edge.nest_level);
    worklist.push(start_edge);
    const auto it_ok = visited.insert(start);
    CHECK(it_ok.second);
    // Traverse from the chosen start: pop the best-ranked edge, emit its nest
    // level, then enqueue all schedulable successors.
    while (!worklist.empty()) {
      // Extract a node and add it to the permutation.
      TraversalEdge crt = worklist.top();
      worklist.pop();
      VLOG(1) << "Insert input permutation, idx: " << input_permutation.size()
              << ", nest_level: " << crt.nest_level;
      input_permutation.push_back(crt.nest_level);
      dependency_tracking.removeNode(crt.nest_level);
      // Add successors which are ready and not visited yet to the queue.
      for (const auto& graph_edge : join_cost_graph[crt.nest_level]) {
        const node_t succ = graph_edge.first;
        if (!schedulable_node(succ)) {
          continue;
        }
        worklist.push(TraversalEdge{succ, graph_edge.second});
        const auto it_ok = visited.insert(succ);
        CHECK(it_ok.second);
      }
    }
  }
  return input_permutation;
}
461 
462 } // namespace
463 
464 std::vector<node_t> get_node_input_permutation(
465  const JoinQualsPerNestingLevel& left_deep_join_quals,
466  const std::vector<InputTableInfo>& table_infos,
467  const Executor* executor) {
468  std::vector<std::map<node_t, InnerQualDecision>> qual_normalization_res(
469  table_infos.size());
470  const auto join_cost_graph = build_join_cost_graph(
471  left_deep_join_quals, table_infos, executor, qual_normalization_res);
472  // Use the number of tuples in each table to break ties in BFS.
473  const auto compare_node = [&table_infos](const node_t lhs_nest_level,
474  const node_t rhs_nest_level) {
475  return table_infos[lhs_nest_level].info.getNumTuplesUpperBound() <
476  table_infos[rhs_nest_level].info.getNumTuplesUpperBound();
477  };
478  const auto compare_edge = [&compare_node](const TraversalEdge& lhs_edge,
479  const TraversalEdge& rhs_edge) {
480  // Only use the number of tuples as a tie-breaker, if costs are equal.
481  if (lhs_edge.join_cost == rhs_edge.join_cost) {
482  return compare_node(lhs_edge.nest_level, rhs_edge.nest_level);
483  }
484  return lhs_edge.join_cost > rhs_edge.join_cost;
485  };
486  return traverse_join_cost_graph(join_cost_graph,
487  table_infos,
488  compare_node,
489  compare_edge,
490  left_deep_join_quals,
491  qual_normalization_res);
492 }
#define CHECK_EQ(x, y)
Definition: Logger.h:230
static bool colvar_comp(const ColumnVar *l, const ColumnVar *r)
Definition: Analyzer.h:215
#define IS_EQUIVALENCE(X)
Definition: sqldefs.h:68
SchedulingDependencyTracking build_dependency_tracking(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< std::map< node_t, cost_t >> &join_cost_graph)
static std::pair< std::vector< InnerOuter >, std::vector< InnerOuterStringOpInfos > > normalizeColumnPairs(const Analyzer::BinOper *condition, const Catalog_Namespace::Catalog &cat, const TemporaryTables *temporary_tables)
Definition: HashJoin.cpp:989
bool is_constructed_point(const Analyzer::Expr *expr)
Definition: Execute.h:1427
static std::unordered_map< SQLTypes, cost_t > std::tuple< cost_t, cost_t, InnerQualDecision > get_join_qual_cost(const Analyzer::Expr *qual, const Executor *executor)
#define CHECK_GE(x, y)
Definition: Logger.h:235
std::vector< JoinCondition > JoinQualsPerNestingLevel
T visit(const Analyzer::Expr *expr) const
unsigned g_trivial_loop_join_threshold
Definition: Execute.cpp:89
std::vector< node_t > get_node_input_permutation(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor)
static std::unordered_map< SQLTypes, cost_t > GEO_TYPE_COSTS
std::vector< std::map< node_t, cost_t > > build_join_cost_graph(const JoinQualsPerNestingLevel &left_deep_join_quals, const std::vector< InputTableInfo > &table_infos, const Executor *executor, std::vector< std::map< node_t, InnerQualDecision >> &qual_detection_res)
const SQLTypeInfo & get_type_info() const
Definition: Analyzer.h:81
const Analyzer::Expr * getArg(const size_t i) const
Definition: Analyzer.h:2171
DEVICE void iota(ARGS &&...args)
Definition: gpu_enabled.h:69
#define CHECK(condition)
Definition: Logger.h:222
InnerQualDecision
Definition: HashJoin.h:52
std::vector< node_t > traverse_join_cost_graph(const std::vector< std::map< node_t, cost_t >> &join_cost_graph, const std::vector< InputTableInfo > &table_infos, const std::function< bool(const node_t lhs_nest_level, const node_t rhs_nest_level)> &compare_node, const std::function< bool(const TraversalEdge &, const TraversalEdge &)> &compare_edge, const JoinQualsPerNestingLevel &left_deep_join_quals, std::vector< std::map< node_t, InnerQualDecision >> &qual_normalization_res)
AccessManager::Decision Decision
InnerQualDecision inner_qual_decision
Definition: HashJoin.h:68
const std::shared_ptr< Analyzer::Expr > get_own_left_operand() const
Definition: Analyzer.h:451
#define VLOG(n)
Definition: Logger.h:316