OmniSciDB  c1a53651b2
RuntimeFunctions.cpp File Reference
#include "RuntimeFunctions.h"
#include "BufferCompaction.h"
#include "HyperLogLogRank.h"
#include "MurmurHash.h"
#include "Shared/Datum.h"
#include "Shared/quantile.h"
#include "TypePunning.h"
#include "Utils/SegmentTreeUtils.h"
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstring>
#include <functional>
#include <thread>
#include <tuple>
#include "DecodersImpl.h"
#include "GeoOpsRuntime.cpp"
#include "GroupByRuntime.cpp"
#include "JoinHashTable/Runtime/JoinHashTableQueryRuntime.cpp"
#include "TopKRuntime.cpp"

Namespaces

 anonymous_namespace{RuntimeFunctions.cpp}
 

Macros

#define DEF_ARITH_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_SAFE_DIV_NULLABLE(type, null_type, opname)
 
#define DEF_BINARY_NULLABLE_ALL_OPS(type, null_type)
 
#define DEF_MAP_STRING_TO_DATUM(value_type, value_name)
 
#define DEF_UMINUS_NULLABLE(type, null_type)
 
#define DEF_CAST_NULLABLE(from_type, to_type)
 
#define DEF_CAST_SCALED_NULLABLE(from_type, to_type)
 
#define DEF_CAST_NULLABLE_BIDIR(type1, type2)
 
#define DEF_ROUND_NULLABLE(from_type, to_type)
 
#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))
 
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(value_type, oper_name)
 
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME_ALL_TYPES(oper_name)
 
#define DEF_RANGE_MODE_FRAME_LOWER_BOUND(target_val_type, col_type, null_type, opname, opsym)
 
#define DEF_RANGE_MODE_FRAME_UPPER_BOUND(target_val_type, col_type, null_type, opname, opsym)
 
#define DEF_GET_VALUE_IN_FRAME(col_type, logical_type)
 
#define DEF_SEARCH_AGGREGATION_TREE(agg_value_type)
 
#define DEF_SEARCH_DERIVED_AGGREGATION_TREE(agg_value_type)
 
#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG(agg_type, null_type)
 
#define DEF_AGG_MAX_INT(n)
 
#define DEF_AGG_MIN_INT(n)
 
#define DEF_AGG_ID_INT(n)
 
#define DEF_CHECKED_SINGLE_AGG_ID_INT(n)
 
#define DEF_WRITE_PROJECTION_INT(n)
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   int64_t
 
#define DATA_T   int32_t
 
#define DATA_T   int16_t
 
#define DATA_T   int8_t
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DEF_SKIP_IF_AGG(skip_agg_func, base_agg_func)
 
#define DATA_T   double
 
#define ADDR_T   int64_t
 
#define DATA_T   float
 
#define ADDR_T   int32_t
 
#define DEF_SHARED_AGG_RET_STUBS(base_agg_func)
 
#define DEF_SHARED_AGG_STUBS(base_agg_func)
 

Enumerations

enum  anonymous_namespace{RuntimeFunctions.cpp}::AggFuncType { MIN, MAX, SUM }
 

Functions

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up (const int64_t operand, const uint64_t scale, const int64_t operand_null_val, const int64_t result_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs (const int64_t dividend, const int64_t divisor)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs (const int64_t dividend, const int64_t divisor, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not (const int8_t operand, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count (uint64_t *agg, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap (int64_t *agg, const int64_t val, const int64_t min_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct (int64_t *agg, const int64_t key, const uint32_t b)
 
GPU_RT_STUB void agg_approximate_count_distinct_gpu (int64_t *, const int64_t, const uint32_t, const int64_t, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set (const int64_t bitset, const int64_t val, const int64_t min_val, const int64_t max_val, const int64_t null_val, const int8_t null_bool_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_int64_t_lower_bound (const int64_t entry_cnt, const int64_t target_value, const int64_t *col_buf)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos (const int64_t null_start_pos, const int64_t null_end_pos)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos (const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)
 
template<typename T , typename Comparator >
int64_t compute_current_row_idx_in_frame (const int64_t num_elems, const int64_t cur_row_idx, const T *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const T null_val, const bool nulls_first, const int64_t null_start_pos, const int64_t null_end_pos, Comparator cmp)
 
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_lower_bound_from_ordered_partition_index (const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
 
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_upper_bound_from_ordered_partition_index (const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
 
template<typename COL_TYPE , typename LOGICAL_TYPE >
LOGICAL_TYPE get_value_in_window_frame (const int64_t target_row_idx_in_frame, const int64_t frame_start_offset, const int64_t frame_end_offset, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const LOGICAL_TYPE logical_null_val, const LOGICAL_TYPE col_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t encode_date (int64_t decoded_val, int64_t null_val, int64_t multiplier)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_integer_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE double * get_double_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair< int64_t > * get_integer_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair< double > * get_double_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal (size_t level, size_t tree_fanout)
 
template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE anonymous_namespace{RuntimeFunctions.cpp}::agg_func (AGG_TYPE const lhs, AGG_TYPE const rhs)
 
template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE compute_window_func_via_aggregation_tree (AGG_TYPE *aggregation_tree_for_partition, size_t query_range_start_idx, size_t query_range_end_idx, size_t leaf_level, size_t tree_fanout, AGG_TYPE init_val, AGG_TYPE invalid_val, AGG_TYPE null_val)
 
template<typename AGG_VALUE_TYPE >
void compute_derived_aggregates (SumAndCountPair< AGG_VALUE_TYPE > *aggregation_tree_for_partition, SumAndCountPair< AGG_VALUE_TYPE > &res, size_t query_range_start_idx, size_t query_range_end_idx, size_t leaf_level, size_t tree_fanout, AGG_VALUE_TYPE invalid_val, AGG_VALUE_TYPE null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if (int64_t *agg, const int64_t val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t * agg_id_varlen (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id (int64_t *agg, const int64_t val, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val (int64_t *agg, const int64_t val, const int64_t min_val, const int64_t skip_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 (uint32_t *agg, const int32_t)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32 (uint32_t *agg, const int32_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 (int32_t *agg, const int32_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32 (int32_t *agg, const int32_t val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if (uint64_t *agg, const int64_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val (uint64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if_skip_val (uint64_t *agg, const int64_t cond, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val (uint32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32_skip_val (uint32_t *agg, const int32_t cond, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double (uint64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_if_double (int64_t *agg, const double val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double (int64_t *agg, const double val, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float (uint32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_if_sum_float (int32_t *agg, const float val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float (int32_t *agg, const float val, const float null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val (uint64_t *agg, const double val, const double skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val (uint32_t *agg, const float val, const float skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor (const int64_t x, const int64_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil (const int64_t x, const int64_t scale)
 
GPU_RT_STUB int8_t * agg_id_varlen_shared (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
GPU_RT_STUB int32_t checked_single_agg_id_shared (int64_t *agg, const int64_t val, const int64_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared (int32_t *agg, const int32_t val, const int32_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared (int16_t *agg, const int16_t val, const int16_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared (int8_t *agg, const int8_t val, const int8_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_double_shared (int64_t *agg, const double val, const double null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_float_shared (int32_t *agg, const float val, const float null_val)
 
GPU_RT_STUB void agg_max_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_max_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_min_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_min_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_id_double_shared_slow (int64_t *agg, const double *val)
 
GPU_RT_STUB int64_t agg_sum_shared (int64_t *agg, const int64_t val)
 
GPU_RT_STUB int64_t agg_sum_if_shared (int64_t *agg, const int64_t val, const int8_t cond)
 
GPU_RT_STUB int64_t agg_sum_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val)
 
GPU_RT_STUB int64_t agg_sum_if_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val, const int8_t cond)
 
GPU_RT_STUB int32_t agg_sum_int32_shared (int32_t *agg, const int32_t val)
 
GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val)
 
GPU_RT_STUB void agg_sum_double_shared (int64_t *agg, const double val)
 
GPU_RT_STUB void agg_sum_double_skip_val_shared (int64_t *agg, const double val, const double skip_val)
 
GPU_RT_STUB void agg_sum_float_shared (int32_t *agg, const float val)
 
GPU_RT_STUB void agg_sum_float_skip_val_shared (int32_t *agg, const float val, const float skip_val)
 
GPU_RT_STUB int32_t agg_sum_if_int32_shared (int32_t *agg, const int32_t val, const int8_t cond)
 
GPU_RT_STUB int32_t agg_sum_if_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_double_shared (int64_t *agg, const double val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_double_skip_val_shared (int64_t *agg, const double val, const double skip_val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_float_shared (int32_t *agg, const float val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_float_skip_val_shared (int32_t *agg, const float val, const float skip_val, const int8_t cond)
 
GPU_RT_STUB void force_sync ()
 
GPU_RT_STUB void sync_warp ()
 
GPU_RT_STUB void sync_warp_protected (int64_t thread_pos, int64_t row_count)
 
GPU_RT_STUB void sync_threadblock ()
 
GPU_RT_STUB void write_back_non_grouped_agg (int64_t *input_buffer, int64_t *output_buffer, const int32_t num_agg_cols)
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl (int32_t *error_code)
 
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ()
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ()
 
GPU_RT_STUB int8_t thread_warp_idx (const int8_t warp_sz)
 
GPU_RT_STUB int64_t get_thread_index ()
 
GPU_RT_STUB int64_t * declare_dynamic_shared_memory ()
 
GPU_RT_STUB int64_t get_block_index ()
 
RUNTIME_EXPORT ALWAYS_INLINE void record_error_code (const int32_t err_code, int32_t *error_codes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code (int32_t *error_codes)
 
RUNTIME_EXPORT NEVER_INLINE const int64_t * init_shared_mem_nop (const int64_t *groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void write_back_nop (int64_t *dest, int64_t *src, const int32_t sz)
 
RUNTIME_EXPORT int64_t * init_shared_mem (const int64_t *global_groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const bool blocks_share_memory, const int32_t frag_idx)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
template<typename T >
ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const T *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width, const uint32_t row_size_quad)
 
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const T *key, const uint32_t key_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_columnar (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_qw_count, const size_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash_keyless (int64_t *groups_buffer, const uint32_t hashed_index, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless_semiprivate (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad, const uint8_t thread_warp_idx, const uint8_t warp_size)
 
RUNTIME_EXPORT ALWAYS_INLINE StringView string_pack (const int8_t *ptr, const int32_t len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length (const char *str, const int32_t str_len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable (const char *str, const int32_t str_len, const int32_t int_null)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded (const int32_t str_id)
 
ALWAYS_INLINE DEVICE int32_t map_string_dict_id (const int32_t string_id, const int64_t translation_map_handle, const int32_t min_source_id)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio (const double proportion, const int64_t row_offset)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_double (const int64_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE float load_float (const int32_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int (const int64_t *sum, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal (const int64_t *sum, const int64_t *count, const double null_val, const uint32_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double (const int64_t *agg, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float (const int32_t *agg, const int32_t *count, const double null_val)
 
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count (uint8_t *bitmap, const uint32_t bitmap_bytes, const uint8_t *key_bytes, const uint32_t key_len)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals (const int8_t **col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT void multifrag_query_hoisted_literals (const int8_t ***col_buffers, const uint64_t *num_fragments, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub (const int8_t **col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT void multifrag_query (const int8_t ***col_buffers, const uint64_t *num_fragments, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ()
 
RUNTIME_EXPORT bool check_interrupt_init (unsigned command)
 

Macro Definition Documentation

#define ADDR_T   int64_t

Definition at line 1510 of file RuntimeFunctions.cpp.

#define ADDR_T   int32_t

Definition at line 1510 of file RuntimeFunctions.cpp.

#define DATA_T   int64_t

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DATA_T   int32_t

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DATA_T   int16_t

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DATA_T   int8_t

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DATA_T   double

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DATA_T   float

Definition at line 1509 of file RuntimeFunctions.cpp.

#define DEF_AGG_ID_INT(n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_id_int##n(int##n##_t* agg, \
                                                           const int##n##_t val) { \
  *agg = val; \
}

Definition at line 1186 of file RuntimeFunctions.cpp.

#define DEF_AGG_MAX_INT(n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_max_int##n(int##n##_t* agg, \
                                                            const int##n##_t val) { \
  *agg = std::max(*agg, val); \
}

Definition at line 1164 of file RuntimeFunctions.cpp.

#define DEF_AGG_MIN_INT(n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_min_int##n(int##n##_t* agg, \
                                                            const int##n##_t val) { \
  *agg = std::min(*agg, val); \
}

Definition at line 1175 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable( \
    const type lhs, const type rhs, const null_type null_val) { \
  if (lhs != null_val && rhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_val; \
}

Definition at line 44 of file RuntimeFunctions.cpp.
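
As an illustration (this instantiation is hypothetical, not taken from the source), DEF_ARITH_NULLABLE(int32_t, int32_t, add, +) would expand to roughly:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t add_int32_t_nullable(
    const int32_t lhs, const int32_t rhs, const int32_t null_val) {
  // NULL propagation: if either operand equals the NULL sentinel,
  // the result is the NULL sentinel rather than lhs + rhs.
  if (lhs != null_val && rhs != null_val) {
    return lhs + rhs;
  }
  return null_val;
}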

#define DEF_ARITH_NULLABLE_LHS(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_lhs( \
    const type lhs, const type rhs, const null_type null_val) { \
  if (lhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_val; \
}

Definition at line 53 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE_RHS(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_rhs( \
    const type lhs, const type rhs, const null_type null_val) { \
  if (rhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_val; \
}

Definition at line 62 of file RuntimeFunctions.cpp.

#define DEF_BINARY_NULLABLE_ALL_OPS(type, null_type)

Definition at line 116 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE(from_type, to_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
    cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
                                               const from_type from_null_val, \
                                               const to_type to_null_val) { \
  return operand == from_null_val ? to_null_val : operand; \
}

Definition at line 260 of file RuntimeFunctions.cpp.
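
For instance (a hypothetical instantiation, not from the source), DEF_CAST_NULLABLE(int32_t, int64_t) would produce:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t cast_int32_t_to_int64_t_nullable(
    const int32_t operand, const int32_t from_null_val, const int64_t to_null_val) {
  // Map the source type's NULL sentinel to the target type's NULL sentinel;
  // any other value is converted implicitly.
  return operand == from_null_val ? to_null_val : operand;
}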

#define DEF_CAST_NULLABLE_BIDIR(type1, type2)
Value:
DEF_CAST_NULLABLE(type1, type2) \
DEF_CAST_NULLABLE(type2, type1)

Definition at line 277 of file RuntimeFunctions.cpp.

#define DEF_CAST_SCALED_NULLABLE(from_type, to_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
    cast_##from_type##_to_##to_type##_scaled_nullable(const from_type operand, \
                                                      const from_type from_null_val, \
                                                      const to_type to_null_val, \
                                                      const to_type multiplier) { \
  return operand == from_null_val ? to_null_val : multiplier * operand; \
}

Definition at line 268 of file RuntimeFunctions.cpp.

#define DEF_CHECKED_SINGLE_AGG_ID_INT(n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_int##n( \
    int##n##_t* agg, const int##n##_t val, const int##n##_t null_val) { \
  if (val == null_val) { \
    return 0; \
  } \
  if (*agg == val) { \
    return 0; \
  } else if (*agg == null_val) { \
    *agg = val; \
    return 0; \
  } else { \
    /* see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES */ \
    return 15; \
  } \
}

Definition at line 1192 of file RuntimeFunctions.cpp.
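
A sketch of the expansion for n = 32 (illustrative only): checked_single_agg_id_int32 accepts at most one distinct non-NULL value per group and reports error code 15 (Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES) when a second one shows up:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_int32(
    int32_t* agg, const int32_t val, const int32_t null_val) {
  if (val == null_val) {
    return 0;                // NULL inputs never conflict
  }
  if (*agg == val) {
    return 0;                // same value seen again: still single-valued
  } else if (*agg == null_val) {
    *agg = val;              // first non-NULL value for this group
    return 0;
  } else {
    return 15;               // a second distinct value: SINGLE_VALUE violation
  }
}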

#define DEF_CMP_NULLABLE(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable( \
    const type lhs, \
    const type rhs, \
    const null_type null_val, \
    const int8_t null_bool_val) { \
  if (lhs != null_val && rhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_bool_val; \
}

Definition at line 71 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_LHS(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_lhs( \
    const type lhs, \
    const type rhs, \
    const null_type null_val, \
    const int8_t null_bool_val) { \
  if (lhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_bool_val; \
}

Definition at line 83 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_RHS(type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_rhs( \
    const type lhs, \
    const type rhs, \
    const null_type null_val, \
    const int8_t null_bool_val) { \
  if (rhs != null_val) { \
    return lhs opsym rhs; \
  } \
  return null_bool_val; \
}

Definition at line 95 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(value_type, oper_name)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
    compute_##value_type##_##oper_name##_current_row_idx_in_frame( \
        const int64_t num_elems, \
        const int64_t cur_row_idx, \
        const value_type* col_buf, \
        const int32_t* partition_rowid_buf, \
        const int64_t* ordered_index_buf, \
        const value_type null_val, \
        const bool nulls_first, \
        const int64_t null_start_pos, \
        const int64_t null_end_pos) { \
  return compute_current_row_idx_in_frame<value_type>(num_elems, \
                                                      cur_row_idx, \
                                                      col_buf, \
                                                      partition_rowid_buf, \
                                                      ordered_index_buf, \
                                                      null_val, \
                                                      nulls_first, \
                                                      null_start_pos, \
                                                      null_end_pos, \
                                                      std::oper_name<value_type>{}); \
}

Definition at line 495 of file RuntimeFunctions.cpp.
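
Because the comparator is spelled std::oper_name<value_type>{}, oper_name must name a standard comparison functor. As a hypothetical example, DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(int64_t, less) would emit compute_int64_t_less_current_row_idx_in_frame, forwarding to the template with std::less<int64_t>{} as the ordering:

// Illustrative expansion (assumed instantiation, not from the source):
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t
compute_int64_t_less_current_row_idx_in_frame(
    const int64_t num_elems, const int64_t cur_row_idx, const int64_t* col_buf,
    const int32_t* partition_rowid_buf, const int64_t* ordered_index_buf,
    const int64_t null_val, const bool nulls_first,
    const int64_t null_start_pos, const int64_t null_end_pos) {
  return compute_current_row_idx_in_frame<int64_t>(
      num_elems, cur_row_idx, col_buf, partition_rowid_buf, ordered_index_buf,
      null_val, nulls_first, null_start_pos, null_end_pos,
      std::less<int64_t>{});  // comparator selected by the oper_name token
}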

#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME_ALL_TYPES(oper_name)

Definition at line 518 of file RuntimeFunctions.cpp.

#define DEF_GET_VALUE_IN_FRAME(col_type, logical_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE logical_type \
    get_##col_type##_value_##logical_type##_type_in_frame( \
        const int64_t target_row_idx_in_frame, \
        const int64_t frame_start_offset, \
        const int64_t frame_end_offset, \
        const col_type* col_buf, \
        const int32_t* partition_rowid_buf, \
        const int64_t* ordered_index_buf, \
        const logical_type logical_null_val, \
        const logical_type col_null_val) { \
  return get_value_in_window_frame<col_type, logical_type>(target_row_idx_in_frame, \
                                                           frame_start_offset, \
                                                           frame_end_offset, \
                                                           col_buf, \
                                                           partition_rowid_buf, \
                                                           ordered_index_buf, \
                                                           logical_null_val, \
                                                           col_null_val); \
}

Definition at line 716 of file RuntimeFunctions.cpp.

#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG(agg_type, null_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE agg_type \
    handle_null_val_##agg_type##_##null_type##_window_framing_agg( \
        agg_type res, null_type agg_null_val, agg_type input_col_null_val) { \
  if (res == agg_null_val) { \
    return input_col_null_val; \
  } \
  return res; \
}

Definition at line 1057 of file RuntimeFunctions.cpp.

#define DEF_MAP_STRING_TO_DATUM(value_type, value_name)
Value:
extern "C" ALWAYS_INLINE DEVICE value_type map_string_to_datum_##value_name( \
    const int32_t string_id, \
    const int64_t translation_map_handle, \
    const int32_t min_source_id) { \
  const Datum* translation_map = \
      reinterpret_cast<const Datum*>(translation_map_handle); \
  const Datum& out_datum = translation_map[string_id - min_source_id]; \
  return out_datum.value_name##val; \
}

Definition at line 177 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_LOWER_BOUND(target_val_type, col_type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
    range_mode_##target_val_type##_##col_type##_##null_type##_##opname##_frame_lower_bound( \
        const int64_t num_elems, \
        const target_val_type target_value, \
        const col_type* col_buf, \
        const int32_t* partition_rowid_buf, \
        const int64_t* ordered_index_buf, \
        const int64_t frame_bound_val, \
        const null_type null_val, \
        const bool nulls_first, \
        const int64_t null_start_pos, \
        const int64_t null_end_pos) { \
  if (target_value == null_val) { \
    return null_start_pos; \
  } \
  target_val_type new_val = target_value opsym frame_bound_val; \
  return compute_lower_bound_from_ordered_partition_index<target_val_type, \
                                                          col_type, \
                                                          null_type>( \
      num_elems, \
      new_val, \
      col_buf, \
      partition_rowid_buf, \
      ordered_index_buf, \
      null_val, \
      nulls_first, \
      null_start_pos, \
      null_end_pos); \
}

Definition at line 560 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_UPPER_BOUND(target_val_type, col_type, null_type, opname, opsym)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
    range_mode_##target_val_type##_##col_type##_##null_type##_##opname##_frame_upper_bound( \
        const int64_t num_elems, \
        const target_val_type target_value, \
        const col_type* col_buf, \
        const int32_t* partition_rowid_buf, \
        const int64_t* ordered_index_buf, \
        const int64_t frame_bound_val, \
        const null_type null_val, \
        const bool nulls_first, \
        const int64_t null_start_pos, \
        const int64_t null_end_pos) { \
  if (target_value == null_val) { \
    return null_end_pos; \
  } \
  target_val_type new_val = target_value opsym frame_bound_val; \
  return compute_upper_bound_from_ordered_partition_index<target_val_type, \
                                                          col_type, \
                                                          null_type>( \
      num_elems, \
      new_val, \
      col_buf, \
      partition_rowid_buf, \
      ordered_index_buf, \
      null_val, \
      nulls_first, \
      null_start_pos, \
      null_end_pos); \
}

Definition at line 641 of file RuntimeFunctions.cpp.

#define DEF_ROUND_NULLABLE(from_type, to_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
    cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
                                               const from_type from_null_val, \
                                               const to_type to_null_val) { \
  return operand == from_null_val \
             ? to_null_val \
             : static_cast<to_type>(operand + (operand < from_type(0) \
                                                   ? from_type(-0.5) \
                                                   : from_type(0.5))); \
}

Definition at line 281 of file RuntimeFunctions.cpp.
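
The add-then-truncate idiom rounds half away from zero. As a hypothetical instantiation, DEF_ROUND_NULLABLE(double, int64_t) yields a cast where 2.4 becomes 2, 2.5 becomes 3, and -2.5 becomes -3:

// Illustrative expansion (assumed instantiation, not from the source):
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t cast_double_to_int64_t_nullable(
    const double operand, const double from_null_val, const int64_t to_null_val) {
  return operand == from_null_val
             ? to_null_val
             // Adding +/-0.5 before truncation rounds halves away from zero:
             // 2.5 + 0.5 = 3.0 -> 3; -2.5 - 0.5 = -3.0 -> -3.
             : static_cast<int64_t>(operand + (operand < 0.0 ? -0.5 : 0.5));
}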

#define DEF_SAFE_DIV_NULLABLE(type, null_type, opname)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type safe_div_##type( \
    const type lhs, const type rhs, const null_type null_val) { \
  if (lhs != null_val && rhs != null_val && rhs != 0) { \
    return lhs / rhs; \
  } \
  return null_val; \
}

Definition at line 107 of file RuntimeFunctions.cpp.
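
Note the rhs != 0 guard: a division by zero is folded into the NULL result rather than trapping. A hypothetical expansion for DEF_SAFE_DIV_NULLABLE(int64_t, int64_t, safe_div) looks like:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t safe_div_int64_t(
    const int64_t lhs, const int64_t rhs, const int64_t null_val) {
  // NULL lhs, NULL rhs, or a zero divisor all produce the NULL sentinel.
  if (lhs != null_val && rhs != null_val && rhs != 0) {
    return lhs / rhs;
  }
  return null_val;
}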

#define DEF_SEARCH_AGGREGATION_TREE(agg_value_type)

Definition at line 895 of file RuntimeFunctions.cpp.

#define DEF_SEARCH_DERIVED_AGGREGATION_TREE(agg_value_type)

Definition at line 1016 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_RET_STUBS(base_agg_func)

Definition at line 1539 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_STUBS(base_agg_func)
Value:
extern "C" GPU_RT_STUB void base_agg_func##_shared(int64_t* agg, const int64_t val) {} \
 \
extern "C" GPU_RT_STUB void base_agg_func##_skip_val_shared( \
    int64_t* agg, const int64_t val, const int64_t skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int32_shared(int32_t* agg, \
                                                         const int32_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int16_shared(int16_t* agg, \
                                                         const int16_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int8_shared(int8_t* agg, \
                                                        const int8_t val) {} \
 \
extern "C" GPU_RT_STUB void base_agg_func##_int32_skip_val_shared( \
    int32_t* agg, const int32_t val, const int32_t skip_val) {} \
 \
extern "C" GPU_RT_STUB void base_agg_func##_double_shared(int64_t* agg, \
                                                          const double val) {} \
 \
extern "C" GPU_RT_STUB void base_agg_func##_double_skip_val_shared( \
    int64_t* agg, const double val, const double skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_float_shared(int32_t* agg, \
                                                         const float val) {} \
 \
extern "C" GPU_RT_STUB void base_agg_func##_float_skip_val_shared( \
    int32_t* agg, const float val, const float skip_val) {}

Definition at line 1578 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG(base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
    DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
  if (val != skip_val) { \
    const DATA_T old_agg = *agg; \
    if (old_agg != skip_val) { \
      base_agg_func(agg, val); \
    } else { \
      *agg = val; \
    } \
  } \
}

Definition at line 1479 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG(base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
    ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
  if (val != skip_val) { \
    const ADDR_T old_agg = *agg; \
    if (old_agg != *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&skip_val))) { \
      base_agg_func(agg, val); \
    } else { \
      *agg = *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&val)); \
    } \
  } \
}

Definition at line 1479 of file RuntimeFunctions.cpp.
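
The second variant is used when DATA_T is a floating-point type stored in an integer slot (DATA_T double with ADDR_T int64_t, or DATA_T float with ADDR_T int32_t), so both the current value and the skip sentinel are compared through their bit patterns. A hypothetical expansion of DEF_SKIP_AGG(agg_sum_double) under those definitions:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double_skip_val(
    int64_t* agg, const double val, const double skip_val) {
  if (val != skip_val) {
    const int64_t old_agg = *agg;  // slot holds the double's bit pattern
    if (old_agg != *reinterpret_cast<const int64_t*>(may_alias_ptr(&skip_val))) {
      agg_sum_double(agg, val);    // slot already holds a real value: accumulate
    } else {
      // slot still holds the sentinel bits: overwrite with val's bit pattern
      *agg = *reinterpret_cast<const int64_t*>(may_alias_ptr(&val));
    }
  }
}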

#define DEF_SKIP_AGG_ADD(base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
    DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
  if (val != skip_val) { \
    base_agg_func(agg, val); \
  } \
}

Definition at line 1312 of file RuntimeFunctions.cpp.

#define DEF_SKIP_IF_AGG(skip_agg_func, base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void skip_agg_func##_skip_val( \
    ADDR_T* agg, const DATA_T val, const DATA_T skip_val, const int8_t cond) { \
  if (cond) { \
    base_agg_func##_skip_val(agg, val, skip_val); \
  } \
}

Definition at line 1492 of file RuntimeFunctions.cpp.

#define DEF_UMINUS_NULLABLE(type, null_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type uminus_##type##_nullable( \
    const type operand, const null_type null_val) { \
  return operand == null_val ? null_val : -operand; \
}

Definition at line 245 of file RuntimeFunctions.cpp.

#define DEF_WRITE_PROJECTION_INT(n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void write_projection_int##n( \
    int8_t* slot_ptr, const int##n##_t val, const int64_t init_val) { \
  if (val != init_val) { \
    *reinterpret_cast<int##n##_t*>(slot_ptr) = val; \
  } \
}

Definition at line 1220 of file RuntimeFunctions.cpp.

#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))

Definition at line 373 of file RuntimeFunctions.cpp.

Function Documentation

RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct ( int64_t * agg, const int64_t key, const uint32_t b )

Definition at line 385 of file RuntimeFunctions.cpp.

References get_rank(), and MurmurHash64A().

{
  const uint64_t hash = MurmurHash64A(&key, sizeof(key), 0);
  const uint32_t index = hash >> (64 - b);
  const uint8_t rank = get_rank(hash << b, 64 - b);
  uint8_t* M = reinterpret_cast<uint8_t*>(*agg);
  M[index] = std::max(M[index], rank);
}
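
This is the classic HyperLogLog register update: the top b bits of the 64-bit MurmurHash select one of 2^b registers, and each register keeps the maximum "rank" (position of the first set bit) seen among the remaining bits. A minimal sketch of the rank computation, assuming get_rank() follows the standard HyperLogLog convention (its actual definition lives in HyperLogLogRank.h):

#include <cstdint>

// rank = 1 + number of leading zero bits in x, capped so an all-zero
// remainder maps to max_rank + 1 (the standard HLL convention).
inline uint8_t hll_rank(uint64_t x, uint32_t max_rank) {
  uint8_t r = 1;
  while (r <= max_rank && !(x & (1ULL << 63))) {
    x <<= 1;
    ++r;
  }
  return r;
}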

GPU_RT_STUB void agg_approximate_count_distinct_gpu ( int64_t *, const int64_t, const uint32_t, const int64_t, const int64_t )

Definition at line 393 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count ( uint64_t * agg, const int64_t )

Definition at line 360 of file RuntimeFunctions.cpp.

Referenced by agg_count_skip_val(), and anonymous_namespace{GroupByAndAggregate.cpp}::get_agg_count().

{
  return (*agg)++;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap ( int64_t * agg, const int64_t val, const int64_t min_val )

Definition at line 365 of file RuntimeFunctions.cpp.

Referenced by agg_count_distinct_bitmap_skip_val(), WindowFunctionContext::fillPartitionEnd(), WindowFunctionContext::fillPartitionStart(), anonymous_namespace{WindowContext.cpp}::index_to_partition_end(), and InValuesBitmap::InValuesBitmap().

{
  const uint64_t bitmap_idx = val - min_val;
  reinterpret_cast<int8_t*>(*agg)[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));
}
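
Each distinct value owns one bit in a dense bitmap: val - min_val picks the bit, >> 3 selects the byte, and & 7 selects the bit within that byte. A small sketch of the matching read side (illustrative helper, not part of the source; compare bit_is_set() above):

#include <cstdint>

// True iff agg_count_distinct_bitmap() has recorded `val` for this group.
inline bool bitmap_contains(const int8_t* bitmap, int64_t val, int64_t min_val) {
  const uint64_t bitmap_idx = val - min_val;
  return bitmap[bitmap_idx >> 3] & (1 << (bitmap_idx & 7));
}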

GPU_RT_STUB void agg_count_distinct_bitmap_gpu ( int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t )

Definition at line 376 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val ( int64_t * agg, const int64_t val, const int64_t min_val, const int64_t skip_val )

Definition at line 1122 of file RuntimeFunctions.cpp.

References agg_count_distinct_bitmap().

{
  if (val != skip_val) {
    agg_count_distinct_bitmap(agg, val, min_val);
  }
}

GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu ( int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t )

Definition at line 1132 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double ( uint64_t * agg, const double val )

Definition at line 1358 of file RuntimeFunctions.cpp.

Referenced by agg_count_double_skip_val().

{
  return (*agg)++;
}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val ( uint64_t * agg, const double val, const double skip_val )

Definition at line 1464 of file RuntimeFunctions.cpp.

References agg_count_double().

{
  if (val != skip_val) {
    return agg_count_double(agg, val);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float ( uint32_t * agg, const float val )

Definition at line 1411 of file RuntimeFunctions.cpp.

Referenced by agg_count_float_skip_val().

{
  return (*agg)++;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val ( uint32_t * agg, const float val, const float skip_val )

Definition at line 1472 of file RuntimeFunctions.cpp.

References agg_count_float().

{
  if (val != skip_val) {
    return agg_count_float(agg, val);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if ( uint64_t * agg, const int64_t cond )

Definition at line 1275 of file RuntimeFunctions.cpp.

Referenced by agg_count_if_skip_val().

{
  return cond ? (*agg)++ : *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32 ( uint32_t * agg, const int32_t cond )

Definition at line 1146 of file RuntimeFunctions.cpp.

Referenced by agg_count_if_int32_skip_val().

{
  return cond ? (*agg)++ : *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32_skip_val ( uint32_t * agg, const int32_t cond, const int32_t skip_val )

Definition at line 1305 of file RuntimeFunctions.cpp.

References agg_count_if_int32().

{
  if (cond != skip_val) {
    return agg_count_if_int32(agg, cond);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if_skip_val ( uint64_t * agg, const int64_t cond, const int64_t skip_val )

Definition at line 1289 of file RuntimeFunctions.cpp.

References agg_count_if().

{
  if (cond != skip_val) {
    return agg_count_if(agg, cond);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 ( uint32_t * agg, const int32_t )

Definition at line 1141 of file RuntimeFunctions.cpp.

Referenced by agg_count_int32_skip_val().

{
  return (*agg)++;
}

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val ( uint32_t * agg, const int32_t val, const int32_t skip_val )

Definition at line 1297 of file RuntimeFunctions.cpp.

References agg_count_int32().

{
  if (val != skip_val) {
    return agg_count_int32(agg, val);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val ( uint64_t * agg, const int64_t val, const int64_t skip_val )

Definition at line 1281 of file RuntimeFunctions.cpp.

References agg_count().

{
  if (val != skip_val) {
    return agg_count(agg, val);
  }
  return *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_id ( int64_t * agg, const int64_t val )

Definition at line 1091 of file RuntimeFunctions.cpp.

{
  *agg = val;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double ( int64_t * agg, const double val )

Definition at line 1389 of file RuntimeFunctions.cpp.

{
  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
}
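
The double is stored bit-for-bit in an int64_t slot; may_alias_ptr (from TypePunning.h) exists to make that reinterpretation legal under strict aliasing. A standards-clean sketch of the same idea using std::memcpy (an illustrative equivalent, not the source's implementation):

#include <cstdint>
#include <cstring>

inline void store_double_bits(int64_t* slot, const double val) {
  static_assert(sizeof(double) == sizeof(int64_t), "slot width must match");
  std::memcpy(slot, &val, sizeof val);  // copy the bit pattern, no conversion
}
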
GPU_RT_STUB void agg_id_double_shared_slow ( int64_t * agg, const double * val )

Definition at line 1670 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float ( int32_t * agg, const float val )

Definition at line 1442 of file RuntimeFunctions.cpp.

{
  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
}

RUNTIME_EXPORT ALWAYS_INLINE int8_t* agg_id_varlen ( int8_t * varlen_buffer, const int64_t offset, const int8_t * value, const int64_t size_bytes )

Definition at line 1095 of file RuntimeFunctions.cpp.

{
  for (auto i = 0; i < size_bytes; i++) {
    varlen_buffer[offset + i] = value[i];
  }
  return &varlen_buffer[offset];
}

GPU_RT_STUB int8_t* agg_id_varlen_shared ( int8_t * varlen_buffer, const int64_t offset, const int8_t * value, const int64_t size_bytes )

Definition at line 1610 of file RuntimeFunctions.cpp.

{
  return nullptr;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_if_sum_float ( int32_t * agg, const float val, const int8_t cond )

Definition at line 1422 of file RuntimeFunctions.cpp.

References agg_sum_float().

{
  if (cond) {
    agg_sum_float(agg, val);
  }
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_max ( int64_t * agg, const int64_t val )

Definition at line 1083 of file RuntimeFunctions.cpp.

{
  *agg = std::max(*agg, val);
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double ( int64_t * agg, const double val )

Definition at line 1377 of file RuntimeFunctions.cpp.

{
  const auto r = std::max(*reinterpret_cast<const double*>(agg), val);
  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float ( int32_t * agg, const float val )

Definition at line 1430 of file RuntimeFunctions.cpp.

{
  const auto r = std::max(*reinterpret_cast<const float*>(agg), val);
  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
}

GPU_RT_STUB void agg_max_int16_skip_val_shared ( int16_t * agg, const int16_t val, const int16_t skip_val )

Definition at line 1654 of file RuntimeFunctions.cpp.

{}

GPU_RT_STUB void agg_max_int8_skip_val_shared ( int8_t * agg, const int8_t val, const int8_t skip_val )

Definition at line 1658 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE void agg_min ( int64_t * agg, const int64_t val )

Definition at line 1087 of file RuntimeFunctions.cpp.

{
  *agg = std::min(*agg, val);
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double ( int64_t * agg, const double val )

Definition at line 1383 of file RuntimeFunctions.cpp.

{
  const auto r = std::min(*reinterpret_cast<const double*>(agg), val);
  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float ( int32_t * agg, const float val )

Definition at line 1436 of file RuntimeFunctions.cpp.

{
  const auto r = std::min(*reinterpret_cast<const float*>(agg), val);
  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
}

GPU_RT_STUB void agg_min_int16_skip_val_shared ( int16_t * agg, const int16_t val, const int16_t skip_val )

Definition at line 1662 of file RuntimeFunctions.cpp.

{}

GPU_RT_STUB void agg_min_int8_skip_val_shared ( int8_t * agg, const int8_t val, const int8_t skip_val )

Definition at line 1666 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum ( int64_t * agg, const int64_t val )

Definition at line 1071 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if(), and agg_sum_skip_val().

{
  const auto old = *agg;
  *agg += val;
  return old;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double ( int64_t * agg, const double val )

Definition at line 1363 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if_double().

{
  const auto r = *reinterpret_cast<const double*>(agg) + val;
  *agg = *reinterpret_cast<const int64_t*>(may_alias_ptr(&r));
}

GPU_RT_STUB void agg_sum_double_shared ( int64_t * agg, const double val )

Definition at line 1704 of file RuntimeFunctions.cpp.

{}

GPU_RT_STUB void agg_sum_double_skip_val_shared ( int64_t * agg, const double val, const double skip_val )

Definition at line 1706 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float ( int32_t * agg, const float val )

Definition at line 1416 of file RuntimeFunctions.cpp.

Referenced by agg_if_sum_float().

{
  const auto r = *reinterpret_cast<const float*>(agg) + val;
  *agg = *reinterpret_cast<const int32_t*>(may_alias_ptr(&r));
}

GPU_RT_STUB void agg_sum_float_shared ( int32_t * agg, const float val )

Definition at line 1709 of file RuntimeFunctions.cpp.

{}

GPU_RT_STUB void agg_sum_float_skip_val_shared ( int32_t * agg, const float val, const float skip_val )

Definition at line 1711 of file RuntimeFunctions.cpp.

{}

RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if ( int64_t * agg, const int64_t val, const int8_t cond )

Definition at line 1077 of file RuntimeFunctions.cpp.

References agg_sum().

{
  return cond ? agg_sum(agg, val) : *agg;
}

RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_if_double ( int64_t *  agg,
const double  val,
const int8_t  cond 
)

Definition at line 1369 of file RuntimeFunctions.cpp.

References agg_sum_double().

1371  {
1372  if (cond) {
1373  agg_sum_double(agg, val);
1374  }
1375 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double(int64_t *agg, const double val)

+ Here is the call graph for this function:

GPU_RT_STUB void agg_sum_if_double_shared ( int64_t *  agg,
const double  val,
const int8_t  cond 
)

Definition at line 1728 of file RuntimeFunctions.cpp.

1730  {}
GPU_RT_STUB void agg_sum_if_double_skip_val_shared ( int64_t *  agg,
const double  val,
const double  skip_val,
const int8_t  cond 
)

Definition at line 1732 of file RuntimeFunctions.cpp.

1735  {}
GPU_RT_STUB void agg_sum_if_float_shared ( int32_t *  agg,
const float  val,
const int8_t  cond 
)

Definition at line 1736 of file RuntimeFunctions.cpp.

1738  {}
GPU_RT_STUB void agg_sum_if_float_skip_val_shared ( int32_t *  agg,
const float  val,
const float  skip_val,
const int8_t  cond 
)

Definition at line 1740 of file RuntimeFunctions.cpp.

1743  {}
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32 ( int32_t *  agg,
const int32_t  val,
const int8_t  cond 
)

Definition at line 1158 of file RuntimeFunctions.cpp.

References agg_sum_int32().

1160  {
1161  return cond ? agg_sum_int32(agg, val) : *agg;
1162 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32(int32_t *agg, const int32_t val)

+ Here is the call graph for this function:

GPU_RT_STUB int32_t agg_sum_if_int32_shared ( int32_t *  agg,
const int32_t  val,
const int8_t  cond 
)

Definition at line 1715 of file RuntimeFunctions.cpp.

1717  {
1718  return 0;
1719 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val,
const int8_t  cond 
)

Definition at line 1268 of file RuntimeFunctions.cpp.

References agg_sum_int32_skip_val().

1271  {
1272  return cond ? agg_sum_int32_skip_val(agg, val, skip_val) : *agg;
1273 }

GPU_RT_STUB int32_t agg_sum_if_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val,
const int8_t  cond 
)

Definition at line 1721 of file RuntimeFunctions.cpp.

1724  {
1725  return 0;
1726 }
GPU_RT_STUB int64_t agg_sum_if_shared ( int64_t *  agg,
const int64_t  val,
const int8_t  cond 
)

Definition at line 1676 of file RuntimeFunctions.cpp.

1678  {
1679  return 0;
1680 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val,
const int8_t  cond 
)

Definition at line 1260 of file RuntimeFunctions.cpp.

References agg_sum_skip_val().

1263  {
1264  return cond ? agg_sum_skip_val(agg, val, skip_val) : *agg;
1265 }

GPU_RT_STUB int64_t agg_sum_if_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val,
const int8_t  cond 
)

Definition at line 1688 of file RuntimeFunctions.cpp.

1691  {
1692  return 0;
1693 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1151 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if_int32(), and agg_sum_int32_skip_val().

1152  {
1153  const auto old = *agg;
1154  *agg += val;
1155  return old;
1156 }


GPU_RT_STUB int32_t agg_sum_int32_shared ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1694 of file RuntimeFunctions.cpp.

1694  {
1695  return 0;
1696 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1247 of file RuntimeFunctions.cpp.

References agg_sum_int32().

Referenced by agg_sum_if_int32_skip_val().

1247  {
1248  const auto old = *agg;
1249  if (val != skip_val) {
1250  if (old != skip_val) {
1251  return agg_sum_int32(agg, val);
1252  } else {
1253  *agg = val;
1254  }
1255  }
1256  return old;
1257 }

GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1698 of file RuntimeFunctions.cpp.

1700  {
1701  return 0;
1702 }
GPU_RT_STUB int64_t agg_sum_shared ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1672 of file RuntimeFunctions.cpp.

1672  {
1673  return 0;
1674 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1232 of file RuntimeFunctions.cpp.

References agg_sum().

Referenced by agg_sum_if_skip_val(), and Executor::reduceResults().

1234  {
1235  const auto old = *agg;
1236  if (val != skip_val) {
1237  if (old != skip_val) {
1238  return agg_sum(agg, val);
1239  } else {
1240  *agg = val;
1241  }
1242  }
1243  return old;
1244 }
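Here skip_val doubles as the column's null sentinel: a slot still equal to skip_val means "no value seen yet", so the first real value overwrites it instead of being added to it. A small usage sketch of that contract (sentinel and values are illustrative):

    #include <cassert>
    #include <cstdint>

    // Restates the agg_sum_skip_val contract; -1 stands in for the null sentinel.
    static int64_t sum_skip_val(int64_t* agg, const int64_t val, const int64_t skip_val) {
      const int64_t old = *agg;
      if (val != skip_val) {
        *agg = (old != skip_val) ? old + val : val;  // first value replaces the sentinel
      }
      return old;
    }

    int main() {
      int64_t slot = -1;            // initialized to the sentinel
      sum_skip_val(&slot, -1, -1);  // null input: slot unchanged
      sum_skip_val(&slot, 10, -1);  // first non-null value: slot = 10
      sum_skip_val(&slot, 5, -1);   // accumulate: slot = 15
      assert(slot == 15);
    }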

GPU_RT_STUB int64_t agg_sum_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1682 of file RuntimeFunctions.cpp.

1684  {
1685  return 0;
1686 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set ( const int64_t  bitset,
const int64_t  val,
const int64_t  min_val,
const int64_t  max_val,
const int64_t  null_val,
const int8_t  null_bool_val 
)

Definition at line 399 of file RuntimeFunctions.cpp.

404  {
405  if (val == null_val) {
406  return null_bool_val;
407  }
408  if (val < min_val || val > max_val) {
409  return 0;
410  }
411  if (!bitset) {
412  return 0;
413  }
414  const uint64_t bitmap_idx = val - min_val;
415  return (reinterpret_cast<const int8_t*>(bitset))[bitmap_idx >> 3] &
416  (1 << (bitmap_idx & 7))
417  ? 1
418  : 0;
419 }
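
The bitset is a dense bitmap over the contiguous value domain [min_val, max_val], with value v mapped to bit v - min_val. A sketch of building such a bitmap and probing it with the same indexing (the buffer sizing here is illustrative):

    #include <cstdint>
    #include <vector>

    int main() {
      const int64_t min_val = 100, max_val = 163;
      std::vector<int8_t> bitmap((max_val - min_val) / 8 + 1, 0);

      // Mark 130 as present: the bit index is the offset from min_val.
      const uint64_t set_idx = 130 - min_val;
      bitmap[set_idx >> 3] |= (1 << (set_idx & 7));

      // The same byte/bit arithmetic bit_is_set uses (null check omitted).
      const uint64_t probe_idx = 130 - min_val;
      const bool present =
          bitmap[probe_idx >> 3] & (1 << (probe_idx & 7));  // true
      (void)present;
    }
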
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length ( const char *  str,
const int32_t  str_len 
)

Definition at line 2080 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit().

2080  {
2081  return str_len;
2082 }


RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable ( const char *  str,
const int32_t  str_len,
const int32_t  int_null 
)

Definition at line 2085 of file RuntimeFunctions.cpp.

2085  {
2086  if (!str) {
2087  return int_null;
2088  }
2089  return str_len;
2090 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ( )

Definition at line 2386 of file RuntimeFunctions.cpp.

References check_interrupt_init(), INT_CHECK, and runtime_interrupt_flag.

2386  {
2387  if (check_interrupt_init(static_cast<unsigned>(INT_CHECK))) {
2388  return true;
2389  }
2390  return false;
2391 }

RUNTIME_EXPORT bool check_interrupt_init ( unsigned  command)

Definition at line 2393 of file RuntimeFunctions.cpp.

References INT_ABORT, INT_CHECK, INT_RESET, and runtime_interrupt_flag.

Referenced by check_interrupt(), Executor::interrupt(), and Executor::resetInterrupt().

2393  {
2394  static std::atomic_bool runtime_interrupt_flag{false};
2395 
2396  if (command == static_cast<unsigned>(INT_CHECK)) {
2397  if (runtime_interrupt_flag.load()) {
2398  return true;
2399  }
2400  return false;
2401  }
2402  if (command == static_cast<unsigned>(INT_ABORT)) {
2403  runtime_interrupt_flag.store(true);
2404  return false;
2405  }
2406  if (command == static_cast<unsigned>(INT_RESET)) {
2407  runtime_interrupt_flag.store(false);
2408  return false;
2409  }
2410  return false;
2411 }
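
The three commands form a small protocol over a single atomic flag: INT_ABORT raises it, INT_RESET clears it, and INT_CHECK polls it without modifying it. A standalone sketch of the same pattern (the enum below is a stand-in for the real command constants):

    #include <atomic>

    enum Command { CHECK, ABORT, RESET };  // stand-ins for INT_CHECK/INT_ABORT/INT_RESET

    static bool interrupt(const Command cmd) {
      static std::atomic_bool flag{false};
      switch (cmd) {
        case CHECK:
          return flag.load();  // poll; leave the flag untouched
        case ABORT:
          flag.store(true);    // request cancellation
          return false;
        case RESET:
          flag.store(false);   // clear before the next query
          return false;
      }
      return false;
    }

    int main() {
      interrupt(ABORT);                    // e.g. from Executor::interrupt()
      const bool stop = interrupt(CHECK);  // worker poll: true, should unwind
      interrupt(RESET);                    // e.g. Executor::resetInterrupt()
      (void)stop;
    }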

RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 1106 of file RuntimeFunctions.cpp.

1106  {
1107  if (val == null_val) {
1108  return 0;
1109  }
1110 
1111  if (*agg == val) {
1112  return 0;
1113  } else if (*agg == null_val) {
1114  *agg = val;
1115  return 0;
1116  } else {
1117  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1118  return 15;
1119  }
1120 }
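
This enforces single-value aggregate semantics: the slot may hold at most one distinct non-null value, and observing a second one returns error code 15 (see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES). A sketch of the contract (the helper restates the logic above; the sentinel is illustrative):

    #include <cassert>
    #include <cstdint>

    static int32_t single_value(int64_t* agg, const int64_t val, const int64_t null_val) {
      if (val == null_val) return 0;                   // nulls are ignored
      if (*agg == val) return 0;                       // same value again is fine
      if (*agg == null_val) { *agg = val; return 0; }  // first value claims the slot
      return 15;                                       // conflicting second value
    }

    int main() {
      const int64_t null_val = INT64_MIN;  // illustrative sentinel
      int64_t slot = null_val;
      assert(single_value(&slot, 42, null_val) == 0);
      assert(single_value(&slot, 42, null_val) == 0);
      assert(single_value(&slot, 43, null_val) == 15);  // two distinct values
    }
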
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 1395 of file RuntimeFunctions.cpp.

1395  {
1396  if (val == null_val) {
1397  return 0;
1398  }
1399 
1400  if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)))) {
1401  return 0;
1402  } else if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&null_val)))) {
1403  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
1404  return 0;
1405  } else {
1406  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1407  return 15;
1408  }
1409 }
GPU_RT_STUB int32_t checked_single_agg_id_double_shared ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 1642 of file RuntimeFunctions.cpp.

1644  {
1645  return 0;
1646 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 1447 of file RuntimeFunctions.cpp.

1447  {
1448  if (val == null_val) {
1449  return 0;
1450  }
1451 
1452  if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)))) {
1453  return 0;
1454  } else if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&null_val)))) {
1455  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
1456  return 0;
1457  } else {
1458  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1459  return 15;
1460  }
1461 }
GPU_RT_STUB int32_t checked_single_agg_id_float_shared ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 1648 of file RuntimeFunctions.cpp.

1650  {
1651  return 0;
1652 }
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  null_val 
)

Definition at line 1630 of file RuntimeFunctions.cpp.

1632  {
1633  return 0;
1634 }
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  null_val 
)

Definition at line 1624 of file RuntimeFunctions.cpp.

1626  {
1627  return 0;
1628 }
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  null_val 
)

Definition at line 1635 of file RuntimeFunctions.cpp.

1637  {
1638  return 0;
1639 }
GPU_RT_STUB int32_t checked_single_agg_id_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 1617 of file RuntimeFunctions.cpp.

1619  {
1620  return 0;
1621 }
template<typename T , typename Comparator >
int64_t compute_current_row_idx_in_frame ( const int64_t  num_elems,
const int64_t  cur_row_idx,
const T *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const T  null_val,
const bool  nulls_first,
const int64_t  null_start_pos,
const int64_t  null_end_pos,
Comparator  cmp 
)
inline

Definition at line 451 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

460  {
461  const auto target_value = col_buf[cur_row_idx];
462  if (target_value == null_val) {
463  for (int64_t target_offset = null_start_pos; target_offset < null_end_pos;
464  target_offset++) {
465  const auto candidate_offset = partition_rowid_buf[ordered_index_buf[target_offset]];
466  if (candidate_offset == cur_row_idx) {
467  return target_offset;
468  }
469  }
470  }
471  auto const modified_null_end_pos = nulls_first ? null_end_pos - 1 : null_end_pos;
472  int64_t l = get_valid_buf_start_pos(null_start_pos, modified_null_end_pos);
473  int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, modified_null_end_pos);
474  while (l < h) {
475  int64_t mid = l + (h - l) / 2;
476  auto const target_row_idx = partition_rowid_buf[ordered_index_buf[mid]];
477  auto const cur_value = col_buf[target_row_idx];
478  if (cmp(target_value, cur_value)) {
479  h = mid;
480  } else {
481  l = mid + 1;
482  }
483  }
484  int64_t target_offset = l;
485  int64_t candidate_row_idx = partition_rowid_buf[ordered_index_buf[target_offset]];
486  while (col_buf[candidate_row_idx] == target_value) {
487  if (candidate_row_idx == cur_row_idx) {
488  return target_offset;
489  }
490  candidate_row_idx = partition_rowid_buf[ordered_index_buf[++target_offset]];
491  }
492  return -1;
493 }

template<typename AGG_VALUE_TYPE >
void compute_derived_aggregates ( SumAndCountPair< AGG_VALUE_TYPE > *  aggregation_tree_for_partition,
SumAndCountPair< AGG_VALUE_TYPE > &  res,
size_t  query_range_start_idx,
size_t  query_range_end_idx,
size_t  leaf_level,
size_t  tree_fanout,
AGG_VALUE_TYPE  invalid_val,
AGG_VALUE_TYPE  null_val 
)
inline

Definition at line 953 of file RuntimeFunctions.cpp.

References SumAndCountPair< T >::count, getStartOffsetForSegmentTreeTraversal(), and SumAndCountPair< T >::sum.

961  {
962  size_t leaf_start_idx = getStartOffsetForSegmentTreeTraversal(leaf_level, tree_fanout);
963  size_t begin = leaf_start_idx + query_range_start_idx;
964  size_t end = leaf_start_idx + query_range_end_idx;
965  SumAndCountPair<AGG_VALUE_TYPE> null_res{null_val, 0};
966  SumAndCountPair<AGG_VALUE_TYPE> invalid_res{invalid_val, 0};
967  bool all_nulls = true;
968  for (int level = leaf_level; level >= 0; level--) {
969  size_t parentBegin = begin / tree_fanout;
970  size_t parentEnd = (end - 1) / tree_fanout;
971  if (parentBegin == parentEnd) {
972  for (size_t pos = begin; pos < end; pos++) {
973  if (aggregation_tree_for_partition[pos].sum != null_val) {
974  all_nulls = false;
975  res.sum += aggregation_tree_for_partition[pos].sum;
976  res.count += aggregation_tree_for_partition[pos].count;
977  }
978  }
979  if (all_nulls) {
980  res = null_res;
981  }
982  return;
983  } else if (parentBegin > parentEnd) {
984  res = null_res;
985  return;
986  }
987  size_t group_begin = (parentBegin * tree_fanout) + 1;
988  if (begin != group_begin) {
989  size_t limit = (parentBegin * tree_fanout) + tree_fanout + 1;
990  for (size_t pos = begin; pos < limit; pos++) {
991  if (aggregation_tree_for_partition[pos].sum != null_val) {
992  all_nulls = false;
993  res.sum += aggregation_tree_for_partition[pos].sum;
994  res.count += aggregation_tree_for_partition[pos].count;
995  }
996  }
997  parentBegin++;
998  }
999  size_t group_end = (parentEnd * tree_fanout) + 1;
1000  if (end != group_end) {
1001  for (size_t pos = group_end; pos < end; pos++) {
1002  if (aggregation_tree_for_partition[pos].sum != null_val) {
1003  all_nulls = false;
1004  res.sum += aggregation_tree_for_partition[pos].sum;
1005  res.count += aggregation_tree_for_partition[pos].count;
1006  }
1007  }
1008  }
1009  begin = parentBegin;
1010  end = parentEnd;
1011  }
1012  res = invalid_res;
1013  return;
1014 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_int64_t_lower_bound ( const int64_t  entry_cnt,
const int64_t  target_value,
const int64_t *  col_buf 
)

Definition at line 422 of file RuntimeFunctions.cpp.

424  {
425  int64_t l = 0;
426  int64_t h = entry_cnt - 1;
427  while (l < h) {
428  int64_t mid = l + (h - l) / 2;
429  if (target_value < col_buf[mid]) {
430  h = mid;
431  } else {
432  l = mid + 1;
433  }
434  }
435  return l;
436 }
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_lower_bound_from_ordered_partition_index ( const int64_t  num_elems,
const TARGET_VAL_TYPE  target_val,
const COL_TYPE *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const NULL_TYPE  null_val,
const bool  nulls_first,
const int64_t  null_start_offset,
const int64_t  null_end_offset 
)
inline

Definition at line 533 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

542  {
543  if (target_val == null_val) {
544  return null_start_offset;
545  }
546  auto const modified_null_end_pos = nulls_first ? null_end_offset - 1 : null_end_offset;
547  int64_t l = get_valid_buf_start_pos(null_start_offset, modified_null_end_pos);
548  int64_t h = get_valid_buf_end_pos(num_elems, null_start_offset, modified_null_end_pos);
549  while (l < h) {
550  int64_t mid = l + (h - l) / 2;
551  if (target_val <= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) {
552  h = mid;
553  } else {
554  l = mid + 1;
555  }
556  }
557  return l;
558 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_add ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound,
int64_t  num_current_partition_elem 
)

Definition at line 782 of file RuntimeFunctions.cpp.

785  {
786  int64_t index = candidate_index - current_partition_start_offset + frame_bound;
787  return index >= num_current_partition_elem ? num_current_partition_elem : index + 1;
788 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_sub ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound 
)

Definition at line 774 of file RuntimeFunctions.cpp.

776  {
777  int64_t index = candidate_index - current_partition_start_offset - frame_bound;
778  return index < 0 ? 0 : index + 1;
779 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_add ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound,
int64_t  num_current_partition_elem 
)

Definition at line 765 of file RuntimeFunctions.cpp.

768  {
769  int64_t index = candidate_index - current_partition_start_offset + frame_bound;
770  return index >= num_current_partition_elem ? num_current_partition_elem : index;
771 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_sub ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound 
)

Definition at line 757 of file RuntimeFunctions.cpp.

759  {
760  int64_t index = candidate_index - current_partition_start_offset - frame_bound;
761  return index < 0 ? 0 : index;
762 }
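
Taken together, the four compute_row_mode_* helpers translate ROWS-mode frame bounds into partition-local indices clamped to [0, num_elems], with the *_end_* variants returning one past the last row of the frame. A worked example for ROWS BETWEEN 2 PRECEDING AND 2 FOLLOWING (values are illustrative):

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t part_start = 100;  // partition starts at global row 100
      const int64_t num_elems = 8;     // partition spans local rows 0..7
      const int64_t cur = 101;         // current row, local index 1

      // 2 PRECEDING (compute_row_mode_start_index_sub): 1 - 2 = -1, clamped to 0.
      int64_t start = cur - part_start - 2;
      start = start < 0 ? 0 : start;
      assert(start == 0);

      // 2 FOLLOWING (compute_row_mode_end_index_add): 1 + 2 = 3; exclusive end 4.
      int64_t end = cur - part_start + 2;
      end = end >= num_elems ? num_elems : end + 1;
      assert(end == 4);  // frame covers local rows [0, 4)
    }
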
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_upper_bound_from_ordered_partition_index ( const int64_t  num_elems,
const TARGET_VAL_TYPE  target_val,
const COL_TYPE *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const NULL_TYPE  null_val,
const bool  nulls_first,
const int64_t  null_start_offset,
const int64_t  null_end_offset 
)
inline

Definition at line 614 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

623  {
624  if (target_val == null_val) {
625  return null_end_offset;
626  }
627  auto const modified_null_end_pos = nulls_first ? null_end_offset - 1 : null_end_offset;
628  int64_t l = get_valid_buf_start_pos(null_start_offset, modified_null_end_pos);
629  int64_t h = get_valid_buf_end_pos(num_elems, null_start_offset, modified_null_end_pos);
630  while (l < h) {
631  int64_t mid = l + (h - l) / 2;
632  if (target_val >= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) {
633  l = mid + 1;
634  } else {
635  h = mid;
636  }
637  }
638  return l;
639 }

template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE compute_window_func_via_aggregation_tree ( AGG_TYPE *  aggregation_tree_for_partition,
size_t  query_range_start_idx,
size_t  query_range_end_idx,
size_t  leaf_level,
size_t  tree_fanout,
AGG_TYPE  init_val,
AGG_TYPE  invalid_val,
AGG_TYPE  null_val 
)
inline

Definition at line 841 of file RuntimeFunctions.cpp.

References getStartOffsetForSegmentTreeTraversal(), and run_benchmark_import::res.

849  {
850  size_t leaf_start_idx = getStartOffsetForSegmentTreeTraversal(leaf_level, tree_fanout);
851  size_t begin = leaf_start_idx + query_range_start_idx;
852  size_t end = leaf_start_idx + query_range_end_idx;
853  AGG_TYPE res = init_val;
854  bool all_nulls = true;
855  for (int level = leaf_level; level >= 0; level--) {
856  size_t parentBegin = begin / tree_fanout;
857  size_t parentEnd = (end - 1) / tree_fanout;
858  if (parentBegin == parentEnd) {
859  for (size_t pos = begin; pos < end; pos++) {
860  if (aggregation_tree_for_partition[pos] != null_val) {
861  all_nulls = false;
862  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
863  }
864  }
865  return all_nulls ? null_val : res;
866  } else if (parentBegin > parentEnd) {
867  return null_val;
868  }
869  size_t group_begin = (parentBegin * tree_fanout) + 1;
870  if (begin != group_begin) {
871  size_t limit = (parentBegin * tree_fanout) + tree_fanout + 1;
872  for (size_t pos = begin; pos < limit; pos++) {
873  if (aggregation_tree_for_partition[pos] != null_val) {
874  all_nulls = false;
875  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
876  }
877  }
878  parentBegin++;
879  }
880  size_t group_end = (parentEnd * tree_fanout) + 1;
881  if (end != group_end) {
882  for (size_t pos = group_end; pos < end; pos++) {
883  if (aggregation_tree_for_partition[pos] != null_val) {
884  all_nulls = false;
885  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
886  }
887  }
888  }
889  begin = parentBegin;
890  end = parentEnd;
891  }
892  return invalid_val;
893 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil ( const int64_t  x,
const int64_t  scale 
)

Definition at line 1532 of file RuntimeFunctions.cpp.

References decimal_floor().

1533  {
1534  return decimal_floor(x, scale) + (x % scale ? scale : 0);
1535 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor ( const int64_t  x,
const int64_t  scale 
)

Definition at line 1521 of file RuntimeFunctions.cpp.

Referenced by decimal_ceil().

1522  {
1523  if (x >= 0) {
1524  return x / scale * scale;
1525  }
1526  if (!(x % scale)) {
1527  return x;
1528  }
1529  return x / scale * scale - scale;
1530 }

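Both functions work on the scaled integer representation of a decimal, where scale is a power of ten: decimal_floor snaps to the next lower multiple of scale, and decimal_ceil adds scale back when the value is not already a multiple. Worked values at scale 100 (two fractional digits):

    #include <cassert>
    #include <cstdint>

    static int64_t dec_floor(const int64_t x, const int64_t scale) {  // mirrors decimal_floor
      if (x >= 0) return x / scale * scale;
      if (!(x % scale)) return x;
      return x / scale * scale - scale;
    }

    int main() {
      // 12.34 is stored as 1234 at scale 100:
      assert(dec_floor(1234, 100) == 1200);    // FLOOR(12.34) == 12.00
      assert(dec_floor(-1234, 100) == -1300);  // FLOOR(-12.34) == -13.00
      // decimal_ceil == decimal_floor + scale when x is not a multiple:
      assert(dec_floor(-1234, 100) + 100 == -1200);  // CEIL(-12.34) == -12.00
    }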

GPU_RT_STUB int64_t* declare_dynamic_shared_memory ( )

Definition at line 1781 of file RuntimeFunctions.cpp.

1781  {
1782  return nullptr;
1783 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t encode_date ( int64_t  decoded_val,
int64_t  null_val,
int64_t  multiplier 
)

Definition at line 750 of file RuntimeFunctions.cpp.

752  {
753  return decoded_val == null_val ? decoded_val : decoded_val * multiplier;
754 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs ( const int64_t  dividend,
const int64_t  divisor 
)

Definition at line 231 of file RuntimeFunctions.cpp.

Referenced by floor_div_nullable_lhs().

232  {
233  return (dividend < 0 ? dividend - (divisor - 1) : dividend) / divisor;
234 }


RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs ( const int64_t  dividend,
const int64_t  divisor,
const int64_t  null_val 
)

Definition at line 239 of file RuntimeFunctions.cpp.

References floor_div_lhs().

241  {
242  return dividend == null_val ? null_val : floor_div_lhs(dividend, divisor);
243 }
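C++ integer division truncates toward zero, whereas FLOOR division rounds toward negative infinity; floor_div_lhs pre-adjusts a negative dividend so that plain division yields the floored result (this assumes a positive divisor). A worked comparison:

    #include <cassert>
    #include <cstdint>

    int main() {
      const int64_t dividend = -7, divisor = 2;
      assert(dividend / divisor == -3);  // C++ truncation toward zero

      // floor_div_lhs arithmetic: (-7 - (2 - 1)) / 2 == -8 / 2 == -4.
      const int64_t adjusted = dividend < 0 ? dividend - (divisor - 1) : dividend;
      assert(adjusted / divisor == -4);  // FLOOR(-3.5) == -4
    }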

GPU_RT_STUB void force_sync ( )

Definition at line 1745 of file RuntimeFunctions.cpp.

1745 {}
GPU_RT_STUB int64_t get_block_index ( )

Definition at line 1785 of file RuntimeFunctions.cpp.

1785  {
1786  return 0;
1787 }
RUNTIME_EXPORT ALWAYS_INLINE double* get_double_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 796 of file RuntimeFunctions.cpp.

798  {
799  double** casted_aggregation_trees = reinterpret_cast<double**>(aggregation_trees);
800  return casted_aggregation_trees[partition_idx];
801 }
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<double>* get_double_derived_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 811 of file RuntimeFunctions.cpp.

811  {
812  SumAndCountPair<double>** casted_aggregation_trees =
813  reinterpret_cast<SumAndCountPair<double>**>(aggregation_trees);
814  return casted_aggregation_trees[partition_idx];
815 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code ( int32_t *  error_codes)

Definition at line 1804 of file RuntimeFunctions.cpp.

References pos_start_impl().

1804  {
1805  return error_codes[pos_start_impl(nullptr)];
1806 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad 
)

Definition at line 2046 of file RuntimeFunctions.cpp.

2051  {
2052  return groups_buffer + row_size_quad * (key - min_key);
2053 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless_semiprivate ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad,
const uint8_t  thread_warp_idx,
const uint8_t  warp_size 
)

Definition at line 2055 of file RuntimeFunctions.cpp.

2062  {
2063  return groups_buffer + row_size_quad * (warp_size * (key - min_key) + thread_warp_idx);
2064 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_integer_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 790 of file RuntimeFunctions.cpp.

792  {
793  return aggregation_trees[partition_idx];
794 }
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<int64_t>* get_integer_derived_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 804 of file RuntimeFunctions.cpp.

804  {
805  SumAndCountPair<int64_t>** casted_aggregation_trees =
806  reinterpret_cast<SumAndCountPair<int64_t>**>(aggregation_trees);
807  return casted_aggregation_trees[partition_idx];
808 }
template<typename T >
ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const T *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 1874 of file RuntimeFunctions.cpp.

References align_to_int64(), and heavydb.dtypes::T.

1878  {
1879  auto off = h * row_size_quad;
1880  auto row_ptr = reinterpret_cast<T*>(groups_buffer + off);
1881  if (*row_ptr == get_empty_key<T>()) {
1882  memcpy(row_ptr, key, key_count * sizeof(T));
1883  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1884  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1885  }
1886  if (memcmp(row_ptr, key, key_count * sizeof(T)) == 0) {
1887  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1888  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1889  }
1890  return nullptr;
1891 }
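Each call is a single probe of an open-addressing group-by table: it claims an empty slot, matches an existing key, or returns nullptr on a collision so the caller can reprobe at another bucket. A simplified sketch of that caller-side loop, with a one-key-column stand-in for the probe and an illustrative linear-probing policy (the real probing policy lives in the generated query code, not here):

    #include <cstdint>

    // Illustrative sentinel; the runtime uses EMPTY_KEY_64.
    static constexpr int64_t kEmptyKey = 0x7FFFFFFFFFFFFFFF;

    // Stand-in for get_matching_group_value, simplified to one key column.
    static int64_t* probe(int64_t* buf, uint32_t h, int64_t key, uint32_t row_quad) {
      int64_t* row = buf + h * row_quad;
      if (row[0] == kEmptyKey) { row[0] = key; return row + 1; }  // claim empty slot
      if (row[0] == key) { return row + 1; }                      // key already present
      return nullptr;                                             // collision
    }

    int main() {
      const uint32_t entries = 8, row_quad = 2;  // 1 key slot + 1 payload slot
      int64_t table[entries * row_quad];
      for (auto& v : table) v = kEmptyKey;

      // Caller-side probe loop: advance on nullptr (linear probing, illustrative).
      const int64_t key = 42;
      const uint32_t h0 = key % entries;
      int64_t* payload = nullptr;
      for (uint32_t i = 0; i < entries && !payload; ++i) {
        payload = probe(table, (h0 + i) % entries, key, row_quad);
      }
      if (payload) *payload += 1;  // update the group's aggregate slot
    }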

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width,
const uint32_t  row_size_quad 
)

Definition at line 1893 of file RuntimeFunctions.cpp.

References get_matching_group_value().

1899  {
1900  switch (key_width) {
1901  case 4:
1902  return get_matching_group_value(groups_buffer,
1903  h,
1904  reinterpret_cast<const int32_t*>(key),
1905  key_count,
1906  row_size_quad);
1907  case 8:
1908  return get_matching_group_value(groups_buffer, h, key, key_count, row_size_quad);
1909  default:;
1910  }
1911  return nullptr;
1912 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_columnar ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_qw_count,
const size_t  entry_count 
)

Definition at line 1962 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

1967  {
1968  auto off = h;
1969  if (groups_buffer[off] == EMPTY_KEY_64) {
1970  for (size_t i = 0; i < key_qw_count; ++i) {
1971  groups_buffer[off] = key[i];
1972  off += entry_count;
1973  }
1974  return &groups_buffer[off];
1975  }
1976  off = h;
1977  for (size_t i = 0; i < key_qw_count; ++i) {
1978  if (groups_buffer[off] != key[i]) {
1979  return nullptr;
1980  }
1981  off += entry_count;
1982  }
1983  return &groups_buffer[off];
1984 }
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const T *  key,
const uint32_t  key_count 
)

Definition at line 1915 of file RuntimeFunctions.cpp.

References heavydb.dtypes::T.

1919  {
1920  auto off = h;
1921  auto key_buffer = reinterpret_cast<T*>(groups_buffer);
1922  if (key_buffer[off] == get_empty_key<T>()) {
1923  for (size_t i = 0; i < key_count; ++i) {
1924  key_buffer[off] = key[i];
1925  off += entry_count;
1926  }
1927  return h;
1928  }
1929  off = h;
1930  for (size_t i = 0; i < key_count; ++i) {
1931  if (key_buffer[off] != key[i]) {
1932  return -1;
1933  }
1934  off += entry_count;
1935  }
1936  return h;
1937 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width 
)

Definition at line 1940 of file RuntimeFunctions.cpp.

References get_matching_group_value_columnar_slot().

1945  {
1946  switch (key_width) {
1947  case 4:
1948  return get_matching_group_value_columnar_slot(groups_buffer,
1949  entry_count,
1950  h,
1951  reinterpret_cast<const int32_t*>(key),
1952  key_count);
1953  case 8:
 1954  return get_matching_group_value_columnar_slot(
 1955  groups_buffer, entry_count, h, key, key_count);
1956  default:
1957  return -1;
1958  }
1959  return -1;
1960 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 1997 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

2002  {
2003  uint32_t off = hashed_index * row_size_quad;
2004  if (groups_buffer[off] == EMPTY_KEY_64) {
2005  for (uint32_t i = 0; i < key_count; ++i) {
2006  groups_buffer[off + i] = key[i];
2007  }
2008  }
2009  return groups_buffer + off + key_count;
2010 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash_keyless ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const uint32_t  row_size_quad 
)

For a particular hashed index (used only with multi-column perfect-hash group by), returns the row-wise offset of the group in the output buffer. Since it is intended for keyless hash use, it assumes there are no group columns prepended to the output buffer.

Definition at line 2019 of file RuntimeFunctions.cpp.

2021  {
2022  return groups_buffer + row_size_quad * hashed_index;
2023 }
GPU_RT_STUB int64_t get_thread_index ( )

Definition at line 1777 of file RuntimeFunctions.cpp.

1777  {
1778  return 0;
1779 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos ( const int64_t  num_elems,
const int64_t  null_start_pos,
const int64_t  null_end_pos 
)

Definition at line 444 of file RuntimeFunctions.cpp.

Referenced by compute_current_row_idx_in_frame(), compute_lower_bound_from_ordered_partition_index(), and compute_upper_bound_from_ordered_partition_index().

446  {
447  return null_end_pos == num_elems ? null_start_pos : num_elems;
448 }


RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos ( const int64_t  null_start_pos,
const int64_t  null_end_pos 
)

Definition at line 439 of file RuntimeFunctions.cpp.

Referenced by compute_current_row_idx_in_frame(), compute_lower_bound_from_ordered_partition_index(), and compute_upper_bound_from_ordered_partition_index().

439  {
440  return null_start_pos == 0 ? null_end_pos + 1 : 0;
441 }


template<typename COL_TYPE , typename LOGICAL_TYPE >
LOGICAL_TYPE get_value_in_window_frame ( const int64_t  target_row_idx_in_frame,
const int64_t  frame_start_offset,
const int64_t  frame_end_offset,
const COL_TYPE *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const LOGICAL_TYPE  logical_null_val,
const LOGICAL_TYPE  col_null_val 
)
inline

Definition at line 695 of file RuntimeFunctions.cpp.

702  {
703  if (target_row_idx_in_frame < frame_start_offset ||
704  target_row_idx_in_frame > frame_end_offset) {
705  return logical_null_val;
706  }
707  const auto target_offset =
708  partition_rowid_buf[ordered_index_buf[target_row_idx_in_frame]];
709  LOGICAL_TYPE target_val = col_buf[target_offset];
710  if (target_val == col_null_val) {
711  return logical_null_val;
712  }
713  return target_val;
714 }
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal ( size_t  level,
size_t  tree_fanout 
)

Definition at line 818 of file RuntimeFunctions.cpp.

Referenced by compute_derived_aggregates(), and compute_window_func_via_aggregation_tree().

818  {
819  size_t offset = 0;
820  for (size_t i = 0; i < level; i++) {
821  offset += pow(tree_fanout, i);
822  }
823  return offset;
824 }

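For fanout f, level L of the segment tree starts after all nodes of the levels above it, i.e. at offset sum of f^i for i = 0..L-1, which is (f^L - 1) / (f - 1) in closed form. A quick check of the closed form against the loop (values illustrative):

    #include <cassert>
    #include <cstddef>

    int main() {
      // Fanout 2, level 3: levels 0..2 hold 1 + 2 + 4 = 7 nodes,
      // so level 3 starts at offset 7 == (2^3 - 1) / (2 - 1).
      const size_t fanout = 2, level = 3;
      size_t offset = 0, power = 1;
      for (size_t i = 0; i < level; ++i) {
        offset += power;  // integer equivalent of pow(tree_fanout, i)
        power *= fanout;
      }
      assert(offset == 7);
    }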

RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ( )

Definition at line 1765 of file RuntimeFunctions.cpp.

References pos_start_impl().

1765  {
1766  return pos_start_impl(nullptr);
1767 }

RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const bool  blocks_share_memory,
const int32_t  frag_idx 
)

Definition at line 1844 of file RuntimeFunctions.cpp.

1852  {
1853 #ifndef _WIN32
1854  // the body is not really needed, just make sure the call is not optimized away
1855  assert(groups_buffer);
1856 #endif
1857 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1830 of file RuntimeFunctions.cpp.

1837  {
1838 #ifndef _WIN32
1839  // the body is not really needed, just make sure the call is not optimized away
1840  assert(groups_buffer);
1841 #endif
1842 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1859 of file RuntimeFunctions.cpp.

1866  {
1867 #ifndef _WIN32
1868  // the body is not really needed, just make sure the call is not optimized away
1869  assert(groups_buffer);
1870 #endif
1871 }
RUNTIME_EXPORT int64_t* init_shared_mem ( const int64_t *  global_groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1825 of file RuntimeFunctions.cpp.

1826  {
1827  return nullptr;
1828 }
RUNTIME_EXPORT NEVER_INLINE const int64_t* init_shared_mem_nop ( const int64_t *  groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1810 of file RuntimeFunctions.cpp.

1812  {
1813  return groups_buffer;
1814 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded ( const int32_t  str_id)

Definition at line 2093 of file RuntimeFunctions.cpp.

2093  {
2094  return str_id;
2095 }
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count ( uint8_t *  bitmap,
const uint32_t  bitmap_bytes,
const uint8_t *  key_bytes,
const uint32_t  key_len 
)

Definition at line 2280 of file RuntimeFunctions.cpp.

References MurmurHash3().

2284  {
2285  const uint32_t bit_pos = MurmurHash3(key_bytes, key_len, 0) % (bitmap_bytes * 8);
2286  const uint32_t word_idx = bit_pos / 32;
2287  const uint32_t bit_idx = bit_pos % 32;
2288  reinterpret_cast<uint32_t*>(bitmap)[word_idx] |= 1 << bit_idx;
2289 }
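This is the update step of linear (probabilistic) counting: each key sets one hash-selected bit, and the distinct count is later estimated from the fraction of bits left unset as n ≈ -m * ln(V), where m is the bitmap size in bits and V the zero-bit fraction. A sketch of the estimation side, which happens elsewhere in the executor (the helper name is illustrative):

    #include <cmath>
    #include <cstdint>

    // Textbook linear-counting estimator over a filled bitmap.
    double estimate_distinct(const uint8_t* bitmap, const uint32_t bitmap_bytes) {
      const double m = bitmap_bytes * 8.0;  // total bits
      double zeros = 0;
      for (uint32_t i = 0; i < bitmap_bytes; ++i) {
        for (int b = 0; b < 8; ++b) {
          if (!(bitmap[i] & (1 << b))) {
            ++zeros;
          }
        }
      }
      const double v = zeros / m;           // fraction of unset bits
      return v > 0 ? -m * std::log(v) : m;  // saturated bitmap: report m
    }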

RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal ( const int64_t *  sum,
const int64_t *  count,
const double  null_val,
const uint32_t  scale 
)

Definition at line 2259 of file RuntimeFunctions.cpp.

2262  {
2263  return *count != 0 ? (static_cast<double>(*sum) / pow(10, scale)) / *count : null_val;
2264 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double ( const int64_t *  agg,
const int64_t *  count,
const double  null_val 
)

Definition at line 2266 of file RuntimeFunctions.cpp.

2268  {
2269  return *count != 0 ? *reinterpret_cast<const double*>(may_alias_ptr(agg)) / *count
2270  : null_val;
2271 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float ( const int32_t *  agg,
const int32_t *  count,
const double  null_val 
)

Definition at line 2273 of file RuntimeFunctions.cpp.

2275  {
2276  return *count != 0 ? *reinterpret_cast<const float*>(may_alias_ptr(agg)) / *count
2277  : null_val;
2278 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int ( const int64_t *  sum,
const int64_t *  count,
const double  null_val 
)

Definition at line 2253 of file RuntimeFunctions.cpp.

2255  {
2256  return *count != 0 ? static_cast<double>(*sum) / *count : null_val;
2257 }
RUNTIME_EXPORT ALWAYS_INLINE double load_double ( const int64_t *  agg)

Definition at line 2245 of file RuntimeFunctions.cpp.

2245  {
2246  return *reinterpret_cast<const double*>(may_alias_ptr(agg));
2247 }
RUNTIME_EXPORT ALWAYS_INLINE float load_float ( const int32_t *  agg)

Definition at line 2249 of file RuntimeFunctions.cpp.

2249  {
2250  return *reinterpret_cast<const float*>(may_alias_ptr(agg));
2251 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 334 of file RuntimeFunctions.cpp.

336  {
337  if (lhs == null_val) {
338  return rhs == 0 ? rhs : null_val;
339  }
340  if (rhs == null_val) {
341  return lhs == 0 ? lhs : null_val;
342  }
343  return (lhs && rhs) ? 1 : 0;
344 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not ( const int8_t  operand,
const int8_t  null_val 
)

Definition at line 329 of file RuntimeFunctions.cpp.

330  {
331  return operand == null_val ? operand : (operand ? 0 : 1);
332 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 346 of file RuntimeFunctions.cpp.

348  {
349  if (lhs == null_val) {
350  return rhs == 0 ? null_val : rhs;
351  }
352  if (rhs == null_val) {
353  return lhs == 0 ? null_val : lhs;
354  }
355  return (lhs || rhs) ? 1 : 0;
356 }
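
logical_and, logical_or, and logical_not together implement SQL's three-valued (Kleene) logic, with null_val as the unknown state: AND still yields FALSE when one side is FALSE and the other NULL, and OR dually yields TRUE. A truth-table spot check (using -128 as an illustrative null sentinel):

    #include <cassert>
    #include <cstdint>

    // Mirrors logical_and; N is the null sentinel.
    static int8_t and3(const int8_t l, const int8_t r, const int8_t N) {
      if (l == N) return r == 0 ? r : N;
      if (r == N) return l == 0 ? l : N;
      return (l && r) ? 1 : 0;
    }

    int main() {
      const int8_t N = -128;
      assert(and3(N, 0, N) == 0);  // NULL AND FALSE == FALSE
      assert(and3(N, 1, N) == N);  // NULL AND TRUE  == NULL
      assert(and3(1, 1, N) == 1);  // TRUE AND TRUE  == TRUE
    }
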
ALWAYS_INLINE DEVICE int32_t map_string_dict_id ( const int32_t  string_id,
const int64_t  translation_map_handle,
const int32_t  min_source_id 
)

Definition at line 2098 of file RuntimeFunctions.cpp.

2100  {
2101  const int32_t* translation_map =
2102  reinterpret_cast<const int32_t*>(translation_map_handle);
2103  return translation_map[string_id - min_source_id];
2104 }
RUNTIME_EXPORT void multifrag_query ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2359 of file RuntimeFunctions.cpp.

References query_stub().

2370  {
2371  for (uint32_t i = 0; i < *num_fragments; ++i) {
2372  query_stub(col_buffers ? col_buffers[i] : nullptr,
2373  &num_rows[i * (*num_tables_ptr)],
2374  &frag_row_offsets[i * (*num_tables_ptr)],
2375  max_matched,
2376  init_agg_value,
2377  out,
2378  i,
2379  join_hash_tables,
2380  total_matched,
2381  error_code,
2382  row_func_mgr);
2383  }
2384 }

RUNTIME_EXPORT void multifrag_query_hoisted_literals ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2311 of file RuntimeFunctions.cpp.

References query_stub_hoisted_literals().

2324  {
2325  for (uint32_t i = 0; i < *num_fragments; ++i) {
2326  query_stub_hoisted_literals(col_buffers ? col_buffers[i] : nullptr,
2327  literals,
2328  &num_rows[i * (*num_tables_ptr)],
2329  &frag_row_offsets[i * (*num_tables_ptr)],
2330  max_matched,
2331  init_agg_value,
2332  out,
2333  i,
2334  join_hash_tables,
2335  total_matched,
2336  error_code,
2337  row_func_mgr);
2338  }
2339 }

RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2239 of file RuntimeFunctions.cpp.

2241  {
2242  return reinterpret_cast<const double*>(output_buff)[pos];
2243 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl ( int32_t *  error_code)

Definition at line 1756 of file RuntimeFunctions.cpp.

1756  {
1757  int32_t row_index_resume{0};
1758  if (error_code) {
1759  row_index_resume = error_code[0];
1760  error_code[0] = 0;
1761  }
1762  return row_index_resume;
1763 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ( )

Definition at line 1769 of file RuntimeFunctions.cpp.

1769  {
1770  return 1;
1771 }
RUNTIME_EXPORT NEVER_INLINE void query_stub ( const int8_t **  col_buffers,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched,
const int8_t *  row_func_mgr 
)

Definition at line 2341 of file RuntimeFunctions.cpp.

Referenced by multifrag_query().

2351  {
2352 #ifndef _WIN32
2353  assert(col_buffers || num_rows || frag_row_offsets || max_matched || init_agg_value ||
2354  out || frag_idx || error_code || join_hash_tables || total_matched ||
2355  row_func_mgr);
2356 #endif
2357 }


RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals ( const int8_t **  col_buffers,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched,
const int8_t *  row_func_mgr 
)

Definition at line 2291 of file RuntimeFunctions.cpp.

Referenced by multifrag_query_hoisted_literals().

2303  {
2304 #ifndef _WIN32
2305  assert(col_buffers || literals || num_rows || frag_row_offsets || max_matched ||
2306  init_agg_value || out || frag_idx || error_code || join_hash_tables ||
2307  total_matched || row_func_mgr);
2308 #endif
2309 }


RUNTIME_EXPORT ALWAYS_INLINE void record_error_code ( const int32_t  err_code,
int32_t *  error_codes 
)

Definition at line 1791 of file RuntimeFunctions.cpp.

References pos_start_impl().

1792  {
1793  // NB: never override persistent error codes (with code greater than zero).
1794  // On GPU, a projection query with a limit can run out of slots without it
1795  // being an actual error if the limit has been hit. If a persistent error
1796  // (division by zero, for example) occurs before running out of slots, we
1797  // have to avoid overriding it, because there's a risk that the query would
1798  // go through if we override with a potentially benign out-of-slots code.
1799  if (err_code && error_codes[pos_start_impl(nullptr)] <= 0) {
1800  error_codes[pos_start_impl(nullptr)] = err_code;
1801  }
1802 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2235 of file RuntimeFunctions.cpp.

2235  {
2236  return reinterpret_cast<const int64_t*>(output_buff)[pos];
2237 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio ( const double  proportion,
const int64_t  row_offset 
)

Definition at line 2106 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit().

2108  {
2109  const int64_t threshold = 4294967296 * proportion;
2110  return (row_offset * 2654435761) % 4294967296 < threshold;
2111 }

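The row offset is hashed with Knuth's multiplicative constant 2654435761 and the row is kept when the hash, reduced modulo 2^32, falls below proportion * 2^32, giving a deterministic, repeatable Bernoulli sample. A restated sketch of the same test:

    #include <cstdint>

    // Same decision sample_ratio makes, restated outside the runtime.
    static bool keep_row(const double proportion, const int64_t row_offset) {
      const int64_t threshold = 4294967296LL * proportion;  // proportion * 2^32
      return (row_offset * 2654435761LL) % 4294967296LL < threshold;
    }

    int main() {
      int kept = 0;
      for (int64_t r = 0; r < 1000; ++r) {
        kept += keep_row(0.1, r);  // expect roughly 100 of 1000 rows
      }
      (void)kept;
    }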

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 221 of file RuntimeFunctions.cpp.

223  {
224  int64_t tmp = scale >> 1;
225  tmp = operand >= 0 ? operand + tmp : operand - tmp;
226  return tmp / scale;
227 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 207 of file RuntimeFunctions.cpp.

209  {
210  // rounded scale down of a decimal
211  if (operand == null_val) {
212  return null_val;
213  }
214 
215  int64_t tmp = scale >> 1;
216  tmp = operand >= 0 ? operand + tmp : operand - tmp;
217  return tmp / scale;
218 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up ( const int64_t  operand,
const uint64_t  scale,
const int64_t  operand_null_val,
const int64_t  result_null_val 
)

Definition at line 199 of file RuntimeFunctions.cpp.

202  {
203  return operand != operand_null_val ? operand * scale : result_null_val;
204 }
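
scale_decimal_up widens a decimal by multiplying by a power of ten, while the scale_decimal_down variants narrow it with round-half-away-from-zero by adding (or subtracting) half the scale before dividing. Worked values at scale 100:

    #include <cassert>
    #include <cstdint>

    int main() {
      // Up: 12.3 at scale 10 (stored 123) -> scale 1000 (stored 12300).
      assert(static_cast<int64_t>(123) * 100 == 12300);

      // Down, as in scale_decimal_down_*: add half the scale, then divide.
      const int64_t scale = 100, half = scale >> 1;
      assert((12345 + half) / scale == 123);    // 123.45 rounds to 123
      assert((12355 + half) / scale == 124);    // 123.55 rounds to 124
      assert((-12355 - half) / scale == -124);  // symmetric for negatives
    }
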
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  entry_count 
)

Definition at line 2030 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

2034  {
2035  if (groups_buffer[hashed_index] == EMPTY_KEY_64) {
2036  for (uint32_t i = 0; i < key_count; i++) {
2037  groups_buffer[i * entry_count + hashed_index] = key[i];
2038  }
2039  }
2040 }
RUNTIME_EXPORT ALWAYS_INLINE StringView string_pack ( const int8_t *  ptr,
const int32_t  len 
)

Definition at line 2066 of file RuntimeFunctions.cpp.

2067  {
2068  return {reinterpret_cast<char const*>(ptr), static_cast<uint64_t>(len)};
2069 }
GPU_RT_STUB void sync_threadblock ( )

Definition at line 1749 of file RuntimeFunctions.cpp.

1749 {}
GPU_RT_STUB void sync_warp ( )

Definition at line 1747 of file RuntimeFunctions.cpp.

1747 {}
GPU_RT_STUB void sync_warp_protected ( int64_t  thread_pos,
int64_t  row_count 
)

Definition at line 1748 of file RuntimeFunctions.cpp.

1748 {}
GPU_RT_STUB int8_t thread_warp_idx ( const int8_t  warp_sz)

Definition at line 1773 of file RuntimeFunctions.cpp.

1773  {
1774  return 0;
1775 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2114 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit(), width_bucket_expr(), and width_bucket_nullable().

2118  {
2119  if (target_value < lower_bound) {
2120  return 0;
2121  } else if (target_value >= upper_bound) {
2122  return partition_count + 1;
2123  }
2124  return ((target_value - lower_bound) * scale_factor) + 1;
2125 }
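
This mirrors SQL's WIDTH_BUCKET with a precomputed scale_factor = partition_count / (upper_bound - lower_bound): values below the lower bound fall into bucket 0, values at or above the upper bound into partition_count + 1, and in-range values into equi-width buckets 1..partition_count. A worked check:

    #include <cassert>

    int main() {
      const double lo = 0.0, hi = 10.0;
      const int parts = 5;
      const double scale = parts / (hi - lo);  // 0.5 buckets per unit

      // Same arithmetic as width_bucket for an in-range value:
      assert(static_cast<int>((7.0 - lo) * scale) + 1 == 4);  // 7.0 -> bucket 4
      // Out of range: below lo -> bucket 0; at or above hi -> parts + 1 == 6.
    }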

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2187 of file RuntimeFunctions.cpp.

References width_bucket(), and width_bucket_reversed().

Referenced by CodeGenerator::codegen(), getExpressionRange(), ScalarExprVisitor< std::set< shared::TableKey > >::visit(), and width_bucket_expr_nullable().

2191  {
2192  if (reversed) {
2193  return width_bucket_reversed(target_value,
2194  lower_bound,
2195  upper_bound,
2196  partition_count / (lower_bound - upper_bound),
2197  partition_count);
2198  }
2199  return width_bucket(target_value,
2200  lower_bound,
2201  upper_bound,
2202  partition_count / (upper_bound - lower_bound),
2203  partition_count);
2204 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2221 of file RuntimeFunctions.cpp.

References width_bucket_no_oob_check(), and width_bucket_reversed_no_oob_check().

2225  {
2226  if (reversed) {
 2227  return width_bucket_reversed_no_oob_check(
 2228  target_value, lower_bound, partition_count / (lower_bound - upper_bound));
 2229  }
 2230  return width_bucket_no_oob_check(
 2231  target_value, lower_bound, partition_count / (upper_bound - lower_bound));
2232 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2207 of file RuntimeFunctions.cpp.

References width_bucket_expr().

2212  {
2213  if (target_value == null_val) {
2214  return INT32_MIN;
2215  }
2216  return width_bucket_expr(
2217  target_value, reversed, lower_bound, upper_bound, partition_count);
2218 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2173 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

2175  {
2176  return ((target_value - lower_bound) * scale_factor) + 1;
2177 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2142 of file RuntimeFunctions.cpp.

References width_bucket().

2147  {
2148  if (target_value == null_val) {
2149  return INT32_MIN;
2150  }
2151  return width_bucket(
2152  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2153 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2128 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr(), and width_bucket_reversed_nullable().

2132  {
2133  if (target_value > lower_bound) {
2134  return 0;
2135  } else if (target_value <= upper_bound) {
2136  return partition_count + 1;
2137  }
2138  return ((lower_bound - target_value) * scale_factor) + 1;
2139 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2180 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

2182  {
2183  return ((lower_bound - target_value) * scale_factor) + 1;
2184 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2156 of file RuntimeFunctions.cpp.

References width_bucket_reversed().

2161  {
2162  if (target_value == null_val) {
2163  return INT32_MIN;
2164  }
2165  return width_bucket_reversed(
2166  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2167 }

GPU_RT_STUB void write_back_non_grouped_agg ( int64_t *  input_buffer,
int64_t *  output_buffer,
const int32_t  num_agg_cols 
)

Definition at line 1751 of file RuntimeFunctions.cpp.

1753  {};
RUNTIME_EXPORT NEVER_INLINE void write_back_nop ( int64_t *  dest,
int64_t *  src,
const int32_t  sz 
)

Definition at line 1816 of file RuntimeFunctions.cpp.

1818  {
1819 #ifndef _WIN32
1820  // the body is not really needed, just make sure the call is not optimized away
1821  assert(dest);
1822 #endif
1823 }