OmniSciDB  ca0c39ec8f
RuntimeFunctions.cpp File Reference
#include "RuntimeFunctions.h"
#include "../Shared/Datum.h"
#include "../Shared/funcannotations.h"
#include "BufferCompaction.h"
#include "HyperLogLogRank.h"
#include "MurmurHash.h"
#include "Shared/quantile.h"
#include "TypePunning.h"
#include "Utils/SegmentTreeUtils.h"
#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <thread>
#include <tuple>
#include "DecodersImpl.h"
#include "GeoOpsRuntime.cpp"
#include "GroupByRuntime.cpp"
#include "JoinHashTable/Runtime/JoinHashTableQueryRuntime.cpp"
#include "TopKRuntime.cpp"

Macros

#define DEF_ARITH_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_SAFE_DIV_NULLABLE(type, null_type, opname)
 
#define DEF_BINARY_NULLABLE_ALL_OPS(type, null_type)
 
#define DEF_MAP_STRING_TO_DATUM(value_type, value_name)
 
#define DEF_UMINUS_NULLABLE(type, null_type)
 
#define DEF_CAST_NULLABLE(from_type, to_type)
 
#define DEF_CAST_SCALED_NULLABLE(from_type, to_type)
 
#define DEF_CAST_NULLABLE_BIDIR(type1, type2)
 
#define DEF_ROUND_NULLABLE(from_type, to_type)
 
#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))
 
#define DEF_COMPUTE_LOWER_BOUND(value_type)
 
#define DEF_COMPUTE_UPPER_BOUND(value_type)
 
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(value_type)
 
#define DEF_COMPUTE_LOWER_BOUND_FROM_ORDERED_INDEX(value_type)
 
#define DEF_COMPUTE_LOWER_BOUND_FROM_ORDERED_INDEX_FOR_TIMEINTERVAL(value_type)
 
#define DEF_COMPUTE_UPPER_BOUND_FROM_ORDERED_INDEX(value_type)
 
#define DEF_COMPUTE_UPPER_BOUND_FROM_ORDERED_INDEX_FOR_TIMEINTERVAL(value_type)
 
#define DEF_GET_VALUE_IN_FRAME(value_type, logical_type)
 
#define DEF_RANGE_MODE_FRAME_LOWER_BOUND(value_type, opname, opsym)
 
#define DEF_RANGE_MODE_FRAME_UPPER_BOUND(value_type, opname, opsym)
 
#define DEF_COMPUTE_FRAME_MIN(agg_value_type)
 
#define DEF_COMPUTE_FRAME_MAX(agg_value_type)
 
#define DEF_COMPUTE_FRAME_SUM(agg_value_type)
 
#define DEF_COMPUTE_SUM_AND_COUNT_FRAME_AGGREGATES(agg_value_type)
 
#define DEF_SEARCH_AGGREGATION_TREE(agg_value_type)
 
#define DEF_SEARCH_DERIVED_AGGREGATION_TREE(agg_value_type)
 
#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG(agg_type, null_type)
 
#define DEF_AGG_MAX_INT(n)
 
#define DEF_AGG_MIN_INT(n)
 
#define DEF_AGG_ID_INT(n)
 
#define DEF_CHECKED_SINGLE_AGG_ID_INT(n)
 
#define DEF_WRITE_PROJECTION_INT(n)
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   int64_t
 
#define DATA_T   int32_t
 
#define DATA_T   int16_t
 
#define DATA_T   int8_t
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   double
 
#define ADDR_T   int64_t
 
#define DATA_T   float
 
#define ADDR_T   int32_t
 
#define DEF_SHARED_AGG_RET_STUBS(base_agg_func)
 
#define DEF_SHARED_AGG_STUBS(base_agg_func)
 

Functions

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up (const int64_t operand, const uint64_t scale, const int64_t operand_null_val, const int64_t result_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs (const int64_t dividend, const int64_t divisor)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs (const int64_t dividend, const int64_t divisor, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not (const int8_t operand, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count (uint64_t *agg, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap (int64_t *agg, const int64_t val, const int64_t min_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct (int64_t *agg, const int64_t key, const uint32_t b)
 
GPU_RT_STUB void agg_approximate_count_distinct_gpu (int64_t *, const int64_t, const uint32_t, const int64_t, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set (const int64_t bitset, const int64_t val, const int64_t min_val, const int64_t max_val, const int64_t null_val, const int8_t null_bool_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos (const int64_t null_start_pos, const int64_t null_end_pos)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos (const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t encode_date (int64_t decoded_val, int64_t null_val, int64_t multiplier)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_integer_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE double * get_double_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair< int64_t > * get_integer_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair< double > * get_double_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal (size_t level, size_t tree_fanout)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t * agg_id_varlen (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id (int64_t *agg, const int64_t val, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val (int64_t *agg, const int64_t val, const int64_t min_val, const int64_t skip_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 (uint32_t *agg, const int32_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 (int32_t *agg, const int32_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val (uint64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val (uint32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double (uint64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double (int64_t *agg, const double val, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float (uint32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float (int32_t *agg, const float val, const float null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val (uint64_t *agg, const double val, const double skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val (uint32_t *agg, const float val, const float skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor (const int64_t x, const int64_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil (const int64_t x, const int64_t scale)
 
GPU_RT_STUB int8_t * agg_id_varlen_shared (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
GPU_RT_STUB int32_t checked_single_agg_id_shared (int64_t *agg, const int64_t val, const int64_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared (int32_t *agg, const int32_t val, const int32_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared (int16_t *agg, const int16_t val, const int16_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared (int8_t *agg, const int8_t val, const int8_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_double_shared (int64_t *agg, const double val, const double null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_float_shared (int32_t *agg, const float val, const float null_val)
 
GPU_RT_STUB void agg_max_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_max_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_min_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_min_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_id_double_shared_slow (int64_t *agg, const double *val)
 
GPU_RT_STUB int64_t agg_sum_shared (int64_t *agg, const int64_t val)
 
GPU_RT_STUB int64_t agg_sum_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val)
 
GPU_RT_STUB int32_t agg_sum_int32_shared (int32_t *agg, const int32_t val)
 
GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val)
 
GPU_RT_STUB void agg_sum_double_shared (int64_t *agg, const double val)
 
GPU_RT_STUB void agg_sum_double_skip_val_shared (int64_t *agg, const double val, const double skip_val)
 
GPU_RT_STUB void agg_sum_float_shared (int32_t *agg, const float val)
 
GPU_RT_STUB void agg_sum_float_skip_val_shared (int32_t *agg, const float val, const float skip_val)
 
GPU_RT_STUB void force_sync ()
 
GPU_RT_STUB void sync_warp ()
 
GPU_RT_STUB void sync_warp_protected (int64_t thread_pos, int64_t row_count)
 
GPU_RT_STUB void sync_threadblock ()
 
GPU_RT_STUB void write_back_non_grouped_agg (int64_t *input_buffer, int64_t *output_buffer, const int32_t num_agg_cols)
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl (int32_t *error_code)
 
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ()
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ()
 
GPU_RT_STUB int8_t thread_warp_idx (const int8_t warp_sz)
 
GPU_RT_STUB int64_t get_thread_index ()
 
GPU_RT_STUB int64_t * declare_dynamic_shared_memory ()
 
GPU_RT_STUB int64_t get_block_index ()
 
RUNTIME_EXPORT ALWAYS_INLINE void record_error_code (const int32_t err_code, int32_t *error_codes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code (int32_t *error_codes)
 
RUNTIME_EXPORT NEVER_INLINE const int64_t * init_shared_mem_nop (const int64_t *groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void write_back_nop (int64_t *dest, int64_t *src, const int32_t sz)
 
RUNTIME_EXPORT int64_t * init_shared_mem (const int64_t *global_groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const bool blocks_share_memory, const int32_t frag_idx)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
template<typename T >
ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const T *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width, const uint32_t row_size_quad)
 
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const T *key, const uint32_t key_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_columnar (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_qw_count, const size_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash_keyless (int64_t *groups_buffer, const uint32_t hashed_index, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless_semiprivate (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad, const uint8_t thread_warp_idx, const uint8_t warp_size)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t * extract_str_ptr (const uint64_t str_and_len)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t extract_str_len (const uint64_t str_and_len)
 
RUNTIME_EXPORT NEVER_INLINE int8_t * extract_str_ptr_noinline (const uint64_t str_and_len)
 
RUNTIME_EXPORT NEVER_INLINE int32_t extract_str_len_noinline (const uint64_t str_and_len)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t string_pack (const int8_t *ptr, const int32_t len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length (const char *str, const int32_t str_len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable (const char *str, const int32_t str_len, const int32_t int_null)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded (const int32_t str_id)
 
ALWAYS_INLINE DEVICE int32_t map_string_dict_id (const int32_t string_id, const int64_t translation_map_handle, const int32_t min_source_id)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio (const double proportion, const int64_t row_offset)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_double (const int64_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE float load_float (const int32_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int (const int64_t *sum, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal (const int64_t *sum, const int64_t *count, const double null_val, const uint32_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double (const int64_t *agg, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float (const int32_t *agg, const int32_t *count, const double null_val)
 
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count (uint8_t *bitmap, const uint32_t bitmap_bytes, const uint8_t *key_bytes, const uint32_t key_len)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals (const int8_t **col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched)
 
RUNTIME_EXPORT void multifrag_query_hoisted_literals (const int8_t ***col_buffers, const uint64_t *num_fragments, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub (const int8_t **col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched)
 
RUNTIME_EXPORT void multifrag_query (const int8_t ***col_buffers, const uint64_t *num_fragments, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ()
 
RUNTIME_EXPORT bool check_interrupt_init (unsigned command)
 

Macro Definition Documentation

#define ADDR_T   int64_t

Definition at line 1578 of file RuntimeFunctions.cpp.

#define ADDR_T   int32_t

Definition at line 1578 of file RuntimeFunctions.cpp.

#define DATA_T   int64_t

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DATA_T   int32_t

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DATA_T   int16_t

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DATA_T   int8_t

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DATA_T   double

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DATA_T   float

Definition at line 1577 of file RuntimeFunctions.cpp.

#define DEF_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_id_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = val; \
}

Definition at line 1308 of file RuntimeFunctions.cpp.

#define DEF_AGG_MAX_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_max_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::max(*agg, val); \
}

Definition at line 1286 of file RuntimeFunctions.cpp.

#define DEF_AGG_MIN_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_min_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::min(*agg, val); \
}

Definition at line 1297 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 46 of file RuntimeFunctions.cpp.
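
For example, DEF_ARITH_NULLABLE(int32_t, int32_t, add, +) expands (modulo formatting) to the function below; the sentinel null_val propagates unchanged whenever either operand is NULL:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t add_int32_t_nullable(
    const int32_t lhs, const int32_t rhs, const int32_t null_val) {
  if (lhs != null_val && rhs != null_val) {
    return lhs + rhs;  // both operands valid: do the arithmetic
  }
  return null_val;  // NULL in, NULL out
}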

#define DEF_ARITH_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_lhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 55 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_rhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 64 of file RuntimeFunctions.cpp.

#define DEF_BINARY_NULLABLE_ALL_OPS (   type,
  null_type 
)

Definition at line 118 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val ? to_null_val : operand; \
}

Definition at line 262 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE_BIDIR (   type1,
  type2 
)
Value:
DEF_CAST_NULLABLE(type1, type2) \
DEF_CAST_NULLABLE(type2, type1)

Definition at line 279 of file RuntimeFunctions.cpp.

#define DEF_CAST_SCALED_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_scaled_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val, \
const to_type multiplier) { \
return operand == from_null_val ? to_null_val : multiplier * operand; \
}

Definition at line 270 of file RuntimeFunctions.cpp.

#define DEF_CHECKED_SINGLE_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_int##n( \
int##n##_t* agg, const int##n##_t val, const int##n##_t null_val) { \
if (val == null_val) { \
return 0; \
} \
if (*agg == val) { \
return 0; \
} else if (*agg == null_val) { \
*agg = val; \
return 0; \
} else { \
/* see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES*/ \
return 15; \
} \
}

Definition at line 1314 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 73 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_lhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 85 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_rhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 97 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME (   value_type)

Definition at line 485 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_FRAME_MAX (   agg_value_type)

Definition at line 913 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_FRAME_MIN (   agg_value_type)

Definition at line 854 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_FRAME_SUM (   agg_value_type)

Definition at line 972 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_LOWER_BOUND (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_##value_type##_lower_bound( \
const int64_t entry_cnt, \
const value_type target_value, \
const value_type* col_buf) { \
int64_t l = 0; \
int64_t h = entry_cnt - 1; \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value < col_buf[mid]) { \
h = mid; \
} else { \
l = mid + 1; \
} \
} \
return l; \
}

Definition at line 423 of file RuntimeFunctions.cpp.
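
Note that the generated loop is not std::lower_bound: because h starts at entry_cnt - 1, it returns the first index whose value is strictly greater than target_value, clamped to the last valid index. A minimal standalone replica of the generated loop, with hypothetical test values:

#include <cassert>
#include <cstdint>

// Replica of the search loop DEF_COMPUTE_LOWER_BOUND(int64_t) emits.
int64_t lower_bound_like(const int64_t entry_cnt,
                         const int64_t target_value,
                         const int64_t* col_buf) {
  int64_t l = 0;
  int64_t h = entry_cnt - 1;
  while (l < h) {
    const int64_t mid = l + (h - l) / 2;  // overflow-safe midpoint
    if (target_value < col_buf[mid]) {
      h = mid;
    } else {
      l = mid + 1;
    }
  }
  return l;
}

int main() {
  const int64_t col[] = {1, 3, 3, 7, 9};
  assert(lower_bound_like(5, 3, col) == 3);  // first element > 3 sits at index 3
  assert(lower_bound_like(5, 0, col) == 0);  // every element exceeds 0
  assert(lower_bound_like(5, 9, col) == 4);  // clamped to the last index
  return 0;
}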

#define DEF_COMPUTE_LOWER_BOUND_FROM_ORDERED_INDEX (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
compute_##value_type##_lower_bound_from_ordered_index( \
const int64_t num_elems, \
const value_type target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const value_type null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_start_pos; \
} \
int64_t l = get_valid_buf_start_pos(null_start_pos, null_end_pos); \
int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, null_end_pos); \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value <= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) { \
h = mid; \
} else { \
l = mid + 1; \
} \
} \
return h; \
}

Definition at line 531 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_LOWER_BOUND_FROM_ORDERED_INDEX_FOR_TIMEINTERVAL (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
compute_##value_type##_lower_bound_from_ordered_index_for_timeinterval( \
const int64_t num_elems, \
const int64_t target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_start_pos; \
} \
int64_t l = get_valid_buf_start_pos(null_start_pos, null_end_pos); \
int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, null_end_pos); \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value <= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) { \
h = mid; \
} else { \
l = mid + 1; \
} \
} \
return h; \
}

Definition at line 565 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_SUM_AND_COUNT_FRAME_AGGREGATES (   agg_value_type)

Definition at line 1033 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_UPPER_BOUND (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_##value_type##_upper_bound( \
const int64_t entry_cnt, \
const value_type target_value, \
const value_type* col_buf) { \
int64_t l = 0; \
int64_t h = entry_cnt - 1; \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value >= col_buf[mid]) { \
h = mid; \
} else { \
l = mid + 1; \
} \
} \
return l; \
}

Definition at line 448 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_UPPER_BOUND_FROM_ORDERED_INDEX (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
compute_##value_type##_upper_bound_from_ordered_index( \
const int64_t num_elems, \
const value_type target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const value_type null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_end_pos; \
} \
int64_t l = get_valid_buf_start_pos(null_start_pos, null_end_pos); \
int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, null_end_pos); \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value >= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) { \
l = mid + 1; \
} else { \
h = mid; \
} \
} \
return l; \
}

Definition at line 596 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_UPPER_BOUND_FROM_ORDERED_INDEX_FOR_TIMEINTERVAL (   value_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
compute_##value_type##_upper_bound_from_ordered_index_for_timeinterval( \
const int64_t num_elems, \
const int64_t target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_end_pos; \
} \
int64_t l = get_valid_buf_start_pos(null_start_pos, null_end_pos); \
int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, null_end_pos); \
while (l < h) { \
int64_t mid = l + (h - l) / 2; \
if (target_value >= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) { \
l = mid + 1; \
} else { \
h = mid; \
} \
} \
return l; \
}

Definition at line 630 of file RuntimeFunctions.cpp.

#define DEF_GET_VALUE_IN_FRAME (   value_type,
  logical_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE logical_type \
get_##value_type##_value_##logical_type##_type_in_frame( \
const int64_t target_row_idx_in_frame, \
const int64_t frame_start_offset, \
const int64_t frame_end_offset, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const logical_type logical_null_val, \
const logical_type col_null_val) { \
if (target_row_idx_in_frame < frame_start_offset || \
target_row_idx_in_frame >= frame_end_offset) { \
return logical_null_val; \
} \
const auto target_offset = \
partition_rowid_buf[ordered_index_buf[target_row_idx_in_frame]]; \
logical_type target_val = col_buf[target_offset]; \
if (target_val == col_null_val) { \
target_val = logical_null_val; \
} \
return target_val; \
}

Definition at line 661 of file RuntimeFunctions.cpp.

#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG (   agg_type,
  null_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE agg_type \
handle_null_val_##agg_type##_##null_type##_window_framing_agg( \
agg_type res, null_type agg_null_val, agg_type input_col_null_val) { \
if (res == agg_null_val) { \
return input_col_null_val; \
} \
return res; \
}

Definition at line 1196 of file RuntimeFunctions.cpp.

#define DEF_MAP_STRING_TO_DATUM (   value_type,
  value_name 
)
Value:
extern "C" ALWAYS_INLINE DEVICE value_type map_string_to_datum_##value_name( \
const int32_t string_id, \
const int64_t translation_map_handle, \
const int32_t min_source_id) { \
const Datum* translation_map = \
reinterpret_cast<const Datum*>(translation_map_handle); \
const Datum& out_datum = translation_map[string_id - min_source_id]; \
return out_datum.value_name##val; \
}

Definition at line 179 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_LOWER_BOUND (   value_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
range_mode_##value_type##_##opname##_frame_lower_bound( \
const int64_t num_elems, \
const value_type target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t frame_bound_val, \
const value_type null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_start_pos; \
} else { \
value_type new_val = target_value opsym frame_bound_val; \
return compute_##value_type##_lower_bound_from_ordered_index(num_elems, \
new_val, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
null_val, \
null_start_pos, \
null_end_pos); \
} \
}

Definition at line 738 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_UPPER_BOUND (   value_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
range_mode_##value_type##_##opname##_frame_upper_bound( \
const int64_t num_elems, \
const value_type target_value, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t frame_bound_val, \
const value_type null_val, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_end_pos; \
} else { \
value_type new_val = target_value opsym frame_bound_val; \
return compute_##value_type##_upper_bound_from_ordered_index(num_elems, \
new_val, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
null_val, \
null_start_pos, \
null_end_pos); \
} \
}

Definition at line 778 of file RuntimeFunctions.cpp.

#define DEF_ROUND_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val \
? to_null_val \
: static_cast<to_type>(operand + (operand < from_type(0) \
? from_type(-0.5) \
: from_type(0.5))); \
}

Definition at line 283 of file RuntimeFunctions.cpp.
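
Instantiated as DEF_ROUND_NULLABLE(double, int64_t), the macro would emit roughly the function below; the +/-0.5 bias before the truncating cast implements round-half-away-from-zero:

extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t cast_double_to_int64_t_nullable(
    const double operand, const double from_null_val, const int64_t to_null_val) {
  return operand == from_null_val
             ? to_null_val
             : static_cast<int64_t>(operand + (operand < 0.0 ? -0.5 : 0.5));
}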

#define DEF_SAFE_DIV_NULLABLE (   type,
  null_type,
  opname 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type safe_div_##type( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val && rhs != 0) { \
return lhs / rhs; \
} \
return null_val; \
}

Definition at line 109 of file RuntimeFunctions.cpp.
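
A standalone replica of what DEF_SAFE_DIV_NULLABLE(int64_t, int64_t, div) generates, showing that a zero divisor yields the NULL sentinel instead of a division fault (the sentinel choice here is hypothetical):

#include <cassert>
#include <cstdint>

int64_t safe_div_int64_t(const int64_t lhs, const int64_t rhs, const int64_t null_val) {
  if (lhs != null_val && rhs != null_val && rhs != 0) {
    return lhs / rhs;
  }
  return null_val;
}

int main() {
  const int64_t kNull = INT64_MIN;
  assert(safe_div_int64_t(10, 2, kNull) == 5);
  assert(safe_div_int64_t(10, 0, kNull) == kNull);    // no SIGFPE, NULL result
  assert(safe_div_int64_t(kNull, 2, kNull) == kNull); // NULL operand propagates
  return 0;
}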

#define DEF_SEARCH_AGGREGATION_TREE (   agg_value_type)

Definition at line 1103 of file RuntimeFunctions.cpp.

#define DEF_SEARCH_DERIVED_AGGREGATION_TREE (   agg_value_type)

Definition at line 1154 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_RET_STUBS (   base_agg_func)

Definition at line 1606 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_STUBS (   base_agg_func)
Value:
extern "C" GPU_RT_STUB void base_agg_func##_shared(int64_t* agg, const int64_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_skip_val_shared( \
int64_t* agg, const int64_t val, const int64_t skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int32_shared(int32_t* agg, \
const int32_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int16_shared(int16_t* agg, \
const int16_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int8_shared(int8_t* agg, \
const int8_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_int32_skip_val_shared( \
int32_t* agg, const int32_t val, const int32_t skip_val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_shared(int64_t* agg, \
const double val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_skip_val_shared( \
int64_t* agg, const double val, const double skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_float_shared(int32_t* agg, \
const float val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_float_skip_val_shared( \
int32_t* agg, const float val, const float skip_val) {}

Definition at line 1645 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const DATA_T old_agg = *agg; \
if (old_agg != skip_val) { \
base_agg_func(agg, val); \
} else { \
*agg = val; \
} \
} \
}

Definition at line 1556 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const ADDR_T old_agg = *agg; \
if (old_agg != *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&skip_val))) { \
base_agg_func(agg, val); \
} else { \
*agg = *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&val)); \
} \
} \
}

Definition at line 1556 of file RuntimeFunctions.cpp.
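
This second variant exists because floating-point aggregates live in integer-typed slots (ADDR_T), so skip_val must be compared against the slot's raw bit pattern rather than as a number. A sketch of that reinterpretation, using memcpy as a well-defined stand-in for the may_alias_ptr cast:

#include <cstdint>
#include <cstring>

// Bit pattern of a double, as stored in an int64_t aggregate slot.
int64_t slot_bits(const double v) {
  int64_t out;
  std::memcpy(&out, &v, sizeof(out));  // equivalent to the may_alias_ptr reinterpret
  return out;
}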

#define DEF_SKIP_AGG_ADD (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
base_agg_func(agg, val); \
} \
}

Definition at line 1548 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG_ADD (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
base_agg_func(agg, val); \
} \
}

Definition at line 1548 of file RuntimeFunctions.cpp.

#define DEF_UMINUS_NULLABLE (   type,
  null_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type uminus_##type##_nullable( \
const type operand, const null_type null_val) { \
return operand == null_val ? null_val : -operand; \
}

Definition at line 247 of file RuntimeFunctions.cpp.

#define DEF_WRITE_PROJECTION_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void write_projection_int##n( \
int8_t* slot_ptr, const int##n##_t val, const int64_t init_val) { \
if (val != init_val) { \
*reinterpret_cast<int##n##_t*>(slot_ptr) = val; \
} \
}

Definition at line 1342 of file RuntimeFunctions.cpp.

#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))

Definition at line 375 of file RuntimeFunctions.cpp.

Function Documentation

RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct ( int64_t *  agg,
const int64_t  key,
const uint32_t  b 
)

Definition at line 387 of file RuntimeFunctions.cpp.

References get_rank(), and MurmurHash64A().

387  {
388  const uint64_t hash = MurmurHash64A(&key, sizeof(key), 0);
389  const uint32_t index = hash >> (64 - b);
390  const uint8_t rank = get_rank(hash << b, 64 - b);
391  uint8_t* M = reinterpret_cast<uint8_t*>(*agg);
392  M[index] = std::max(M[index], rank);
393 }
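A minimal sketch of the HyperLogLog register update performed above: the top b bits of the 64-bit MurmurHash select one of 2^b one-byte registers, and the rank of the remaining bits is maxed into that register. The contract of get_rank is assumed here to be the 1-based position of the first set bit among the inspected bits (one more than the bit count if all are zero):

#include <algorithm>
#include <cstdint>

static uint8_t rank_of(uint64_t x, uint32_t bits) {
  uint8_t r = 1;
  while (bits != 0 && (x & (1ULL << 63)) == 0) {
    ++r;       // count leading zeros, 1-based
    x <<= 1;
    --bits;
  }
  return r;
}

void hll_update(uint8_t* M, const uint32_t b, const uint64_t hash) {
  const uint32_t index = hash >> (64 - b);          // register selector
  const uint8_t rank = rank_of(hash << b, 64 - b);  // rank of the low bits
  M[index] = std::max(M[index], rank);              // keep the max per register
}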

GPU_RT_STUB void agg_approximate_count_distinct_gpu ( int64_t *  ,
const int64_t  ,
const uint32_t  ,
const int64_t  ,
const int64_t   
)

Definition at line 395 of file RuntimeFunctions.cpp.

399  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count ( uint64_t *  agg,
const int64_t   
)

Definition at line 362 of file RuntimeFunctions.cpp.

Referenced by agg_count_skip_val(), and anonymous_namespace{GroupByAndAggregate.cpp}::get_agg_count().

362  {
363  return (*agg)++;
364 }


RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val 
)

Definition at line 367 of file RuntimeFunctions.cpp.

Referenced by agg_count_distinct_bitmap_skip_val(), WindowFunctionContext::fillPartitionEnd(), WindowFunctionContext::fillPartitionStart(), anonymous_namespace{WindowContext.cpp}::index_to_partition_end(), and InValuesBitmap::InValuesBitmap().

367  {
368  const uint64_t bitmap_idx = val - min_val;
369  reinterpret_cast<int8_t*>(*agg)[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));
370 }

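A sketch of the addressing math with a hypothetical caller-side setup: val - min_val is a bit offset into a pre-allocated bitmap whose base pointer travels through the int64_t aggregate slot; >> 3 selects the byte and & 7 the bit within it:

#include <cstdint>
#include <vector>

int main() {
  const int64_t min_val = 100;
  std::vector<int8_t> bitmap(128, 0);  // one bit each for values 100..1123
  int64_t agg = reinterpret_cast<int64_t>(bitmap.data());

  const int64_t val = 105;
  const uint64_t bitmap_idx = val - min_val;  // 5
  reinterpret_cast<int8_t*>(agg)[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));
  // Re-inserting 105 sets the same bit again, which is what makes the
  // COUNT(DISTINCT) bitmap exact and idempotent.
  return 0;
}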

GPU_RT_STUB void agg_count_distinct_bitmap_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 378 of file RuntimeFunctions.cpp.

384  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val,
const int64_t  skip_val 
)

Definition at line 1255 of file RuntimeFunctions.cpp.

References agg_count_distinct_bitmap().

1259  {
1260  if (val != skip_val) {
1261  agg_count_distinct_bitmap(agg, val, min_val);
1262  }
1263 }

GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 1265 of file RuntimeFunctions.cpp.

1272  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double ( uint64_t *  agg,
const double  val 
)

Definition at line 1443 of file RuntimeFunctions.cpp.

Referenced by agg_count_double_skip_val().

1444  {
1445  return (*agg)++;
1446 }


RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val ( uint64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 1533 of file RuntimeFunctions.cpp.

References agg_count_double().

1533  {
1534  if (val != skip_val) {
1535  return agg_count_double(agg, val);
1536  }
1537  return *agg;
1538 }

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float ( uint32_t *  agg,
const float  val 
)

Definition at line 1488 of file RuntimeFunctions.cpp.

Referenced by agg_count_float_skip_val().

1489  {
1490  return (*agg)++;
1491 }


RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val ( uint32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 1541 of file RuntimeFunctions.cpp.

References agg_count_float().

1541  {
1542  if (val != skip_val) {
1543  return agg_count_float(agg, val);
1544  }
1545  return *agg;
1546 }

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 ( uint32_t *  agg,
const int32_t   
)

Definition at line 1274 of file RuntimeFunctions.cpp.

Referenced by agg_count_int32_skip_val().

1275  {
1276  return (*agg)++;
1277 }


RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val ( uint32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1390 of file RuntimeFunctions.cpp.

References agg_count_int32().

1390  {
1391  if (val != skip_val) {
1392  return agg_count_int32(agg, val);
1393  }
1394  return *agg;
1395 }

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val ( uint64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1382 of file RuntimeFunctions.cpp.

References agg_count().

1382  {
1383  if (val != skip_val) {
1384  return agg_count(agg, val);
1385  }
1386  return *agg;
1387 }

RUNTIME_EXPORT ALWAYS_INLINE void agg_id ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1224 of file RuntimeFunctions.cpp.

1224  {
1225  *agg = val;
1226 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double ( int64_t *  agg,
const double  val 
)

Definition at line 1466 of file RuntimeFunctions.cpp.

1467  {
1468  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
1469 }
GPU_RT_STUB void agg_id_double_shared_slow ( int64_t *  agg,
const double *  val 
)

Definition at line 1736 of file RuntimeFunctions.cpp.

1736 {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float ( int32_t *  agg,
const float  val 
)

Definition at line 1511 of file RuntimeFunctions.cpp.

1511  {
1512  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
1513 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t* agg_id_varlen ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 1228 of file RuntimeFunctions.cpp.

1231  {
1232  for (auto i = 0; i < size_bytes; i++) {
1233  varlen_buffer[offset + i] = value[i];
1234  }
1235  return &varlen_buffer[offset];
1236 }
GPU_RT_STUB int8_t* agg_id_varlen_shared ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 1676 of file RuntimeFunctions.cpp.

1679  {
1680  return nullptr;
1681 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1216 of file RuntimeFunctions.cpp.

1216  {
1217  *agg = std::max(*agg, val);
1218 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double ( int64_t *  agg,
const double  val 
)

Definition at line 1454 of file RuntimeFunctions.cpp.

1455  {
1456  const auto r = std::max(*reinterpret_cast<const double*>(agg), val);
1457  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
1458 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float ( int32_t *  agg,
const float  val 
)

Definition at line 1499 of file RuntimeFunctions.cpp.

1500  {
1501  const auto r = std::max(*reinterpret_cast<const float*>(agg), val);
1502  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
1503 }
GPU_RT_STUB void agg_max_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 1720 of file RuntimeFunctions.cpp.

1722  {}
GPU_RT_STUB void agg_max_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 1724 of file RuntimeFunctions.cpp.

1726  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_min ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1220 of file RuntimeFunctions.cpp.

1220  {
1221  *agg = std::min(*agg, val);
1222 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double ( int64_t *  agg,
const double  val 
)

Definition at line 1460 of file RuntimeFunctions.cpp.

1461  {
1462  const auto r = std::min(*reinterpret_cast<const double*>(agg), val);
1463  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
1464 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float ( int32_t *  agg,
const float  val 
)

Definition at line 1505 of file RuntimeFunctions.cpp.

1506  {
1507  const auto r = std::min(*reinterpret_cast<const float*>(agg), val);
1508  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
1509 }
GPU_RT_STUB void agg_min_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 1728 of file RuntimeFunctions.cpp.

1730  {}
GPU_RT_STUB void agg_min_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 1732 of file RuntimeFunctions.cpp.

1734  {}
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1210 of file RuntimeFunctions.cpp.

Referenced by agg_sum_skip_val().

1210  {
1211  const auto old = *agg;
1212  *agg += val;
1213  return old;
1214 }


RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double ( int64_t *  agg,
const double  val 
)

Definition at line 1448 of file RuntimeFunctions.cpp.

1449  {
1450  const auto r = *reinterpret_cast<const double*>(agg) + val;
1451  *agg = *reinterpret_cast<const int64_t*>(may_alias_ptr(&r));
1452 }
GPU_RT_STUB void agg_sum_double_shared ( int64_t *  agg,
const double  val 
)

Definition at line 1757 of file RuntimeFunctions.cpp.

1757 {}
GPU_RT_STUB void agg_sum_double_skip_val_shared ( int64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 1759 of file RuntimeFunctions.cpp.

1761  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float ( int32_t *  agg,
const float  val 
)

Definition at line 1493 of file RuntimeFunctions.cpp.

1494  {
1495  const auto r = *reinterpret_cast<const float*>(agg) + val;
1496  *agg = *reinterpret_cast<const int32_t*>(may_alias_ptr(&r));
1497 }
GPU_RT_STUB void agg_sum_float_shared ( int32_t *  agg,
const float  val 
)

Definition at line 1762 of file RuntimeFunctions.cpp.

1762 {}
GPU_RT_STUB void agg_sum_float_skip_val_shared ( int32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 1764 of file RuntimeFunctions.cpp.

1766  {}
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1279 of file RuntimeFunctions.cpp.

Referenced by agg_sum_int32_skip_val().

1280  {
1281  const auto old = *agg;
1282  *agg += val;
1283  return old;
1284 }


GPU_RT_STUB int32_t agg_sum_int32_shared ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1747 of file RuntimeFunctions.cpp.

1747  {
1748  return 0;
1749 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1369 of file RuntimeFunctions.cpp.

References agg_sum_int32().

1369  {
1370  const auto old = *agg;
1371  if (val != skip_val) {
1372  if (old != skip_val) {
1373  return agg_sum_int32(agg, val);
1374  } else {
1375  *agg = val;
1376  }
1377  }
1378  return old;
1379 }

GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1751 of file RuntimeFunctions.cpp.

1753  {
1754  return 0;
1755 }
GPU_RT_STUB int64_t agg_sum_shared ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1738 of file RuntimeFunctions.cpp.

1738  {
1739  return 0;
1740 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1354 of file RuntimeFunctions.cpp.

References agg_sum().

Referenced by Executor::reduceResults().

1356  {
1357  const auto old = *agg;
1358  if (val != skip_val) {
1359  if (old != skip_val) {
1360  return agg_sum(agg, val);
1361  } else {
1362  *agg = val;
1363  }
1364  }
1365  return old;
1366 }
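Usage sketch (the -1 sentinel is hypothetical): skip_val doubles as the NULL marker for both the input and the running aggregate, so the first non-NULL value replaces the sentinel instead of being added to it:

int64_t acc = -1;                // slot initialized to skip_val
agg_sum_skip_val(&acc, -1, -1);  // NULL input: acc stays -1
agg_sum_skip_val(&acc, 5, -1);   // first real value: acc becomes 5
agg_sum_skip_val(&acc, 7, -1);   // later values accumulate: acc == 12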

GPU_RT_STUB int64_t agg_sum_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1742 of file RuntimeFunctions.cpp.

1744  {
1745  return 0;
1746 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set ( const int64_t  bitset,
const int64_t  val,
const int64_t  min_val,
const int64_t  max_val,
const int64_t  null_val,
const int8_t  null_bool_val 
)

Definition at line 401 of file RuntimeFunctions.cpp.

406  {
407  if (val == null_val) {
408  return null_bool_val;
409  }
410  if (val < min_val || val > max_val) {
411  return 0;
412  }
413  if (!bitset) {
414  return 0;
415  }
416  const uint64_t bitmap_idx = val - min_val;
417  return (reinterpret_cast<const int8_t*>(bitset))[bitmap_idx >> 3] &
418  (1 << (bitmap_idx & 7))
419  ? 1
420  : 0;
421 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length ( const char *  str,
const int32_t  str_len 
)

Definition at line 2124 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit().

2124  {
2125  return str_len;
2126 }


RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable ( const char *  str,
const int32_t  str_len,
const int32_t  int_null 
)

Definition at line 2129 of file RuntimeFunctions.cpp.

2129  {
2130  if (!str) {
2131  return int_null;
2132  }
2133  return str_len;
2134 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ( )

Definition at line 2423 of file RuntimeFunctions.cpp.

References check_interrupt_init(), INT_CHECK, and runtime_interrupt_flag.

2423  {
2424  if (check_interrupt_init(static_cast<unsigned>(INT_CHECK))) {
2425  return true;
2426  }
2427  return false;
2428 }

RUNTIME_EXPORT bool check_interrupt_init ( unsigned  command)

Definition at line 2430 of file RuntimeFunctions.cpp.

References INT_ABORT, INT_CHECK, INT_RESET, and runtime_interrupt_flag.

Referenced by check_interrupt(), Executor::interrupt(), and Executor::resetInterrupt().

2430  {
2431  static std::atomic_bool runtime_interrupt_flag{false};
2432 
2433  if (command == static_cast<unsigned>(INT_CHECK)) {
2434  if (runtime_interrupt_flag.load()) {
2435  return true;
2436  }
2437  return false;
2438  }
2439  if (command == static_cast<unsigned>(INT_ABORT)) {
2440  runtime_interrupt_flag.store(true);
2441  return false;
2442  }
2443  if (command == static_cast<unsigned>(INT_RESET)) {
2444  runtime_interrupt_flag.store(false);
2445  return false;
2446  }
2447  return false;
2448 }
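
A usage sketch of the command protocol; the concrete INT_* values are assumptions here (the real constants come from the interrupt enum this file uses), and the body is condensed from the definition above:

#include <atomic>
#include <cstdio>

enum : unsigned { INT_CHECK = 0, INT_ABORT = 1, INT_RESET = 2 };  // assumed values

bool check_interrupt_init(unsigned command) {
  static std::atomic_bool runtime_interrupt_flag{false};
  if (command == INT_CHECK) {
    return runtime_interrupt_flag.load();
  }
  if (command == INT_ABORT) {
    runtime_interrupt_flag.store(true);   // as in Executor::interrupt()
  } else if (command == INT_RESET) {
    runtime_interrupt_flag.store(false);  // as in Executor::resetInterrupt()
  }
  return false;
}

int main() {
  std::printf("%d", check_interrupt_init(INT_CHECK));    // 0: no interrupt pending
  check_interrupt_init(INT_ABORT);
  std::printf("%d", check_interrupt_init(INT_CHECK));    // 1: interrupt requested
  check_interrupt_init(INT_RESET);
  std::printf("%d\n", check_interrupt_init(INT_CHECK));  // 0 again
}
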

RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 1239 of file RuntimeFunctions.cpp.

1239  {
1240  if (val == null_val) {
1241  return 0;
1242  }
1243 
1244  if (*agg == val) {
1245  return 0;
1246  } else if (*agg == null_val) {
1247  *agg = val;
1248  return 0;
1249  } else {
1250  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1251  return 15;
1252  }
1253 }
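
A standalone sketch of the single-value contract: the first non-NULL value is latched, repeats of it succeed, and any second distinct value reports error code 15 (ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES). The NULL sentinel is an assumption:

#include <cstdint>
#include <cstdio>

int32_t checked_single_agg_id(int64_t* agg, const int64_t val,
                              const int64_t null_val) {
  if (val == null_val) {
    return 0;
  }
  if (*agg == val) {
    return 0;
  } else if (*agg == null_val) {
    *agg = val;  // latch the first non-NULL value
    return 0;
  }
  return 15;  // a second distinct value was seen
}

int main() {
  const int64_t null_val = -1;  // assumed sentinel
  int64_t agg = null_val;
  std::printf("%d ", checked_single_agg_id(&agg, 42, null_val));  // 0
  std::printf("%d ", checked_single_agg_id(&agg, 42, null_val));  // 0: same value
  std::printf("%d\n", checked_single_agg_id(&agg, 7, null_val));  // 15
}
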
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 1472 of file RuntimeFunctions.cpp.

1472  {
1473  if (val == null_val) {
1474  return 0;
1475  }
1476 
1477  if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)))) {
1478  return 0;
1479  } else if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&null_val)))) {
1480  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
1481  return 0;
1482  } else {
1483  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1484  return 15;
1485  }
1486 }
GPU_RT_STUB int32_t checked_single_agg_id_double_shared ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 1708 of file RuntimeFunctions.cpp.

1710  {
1711  return 0;
1712 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 1516 of file RuntimeFunctions.cpp.

1516  {
1517  if (val == null_val) {
1518  return 0;
1519  }
1520 
1521  if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)))) {
1522  return 0;
1523  } else if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&null_val)))) {
1524  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
1525  return 0;
1526  } else {
1527  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
1528  return 15;
1529  }
1530 }
GPU_RT_STUB int32_t checked_single_agg_id_float_shared ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 1714 of file RuntimeFunctions.cpp.

1716  {
1717  return 0;
1718 }
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  null_val 
)

Definition at line 1696 of file RuntimeFunctions.cpp.

1698  {
1699  return 0;
1700 }
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  null_val 
)

Definition at line 1690 of file RuntimeFunctions.cpp.

1692  {
1693  return 0;
1694 }
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  null_val 
)

Definition at line 1701 of file RuntimeFunctions.cpp.

1703  {
1704  return 0;
1705 }
GPU_RT_STUB int32_t checked_single_agg_id_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 1683 of file RuntimeFunctions.cpp.

1685  {
1686  return 0;
1687 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_add ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound,
int64_t  num_current_partition_elem 
)

Definition at line 730 of file RuntimeFunctions.cpp.

733  {
734  int64_t index = candidate_index - current_partition_start_offset + frame_bound;
735  return index >= num_current_partition_elem ? num_current_partition_elem : index + 1;
736 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_sub ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound 
)

Definition at line 722 of file RuntimeFunctions.cpp.

724  {
725  int64_t index = candidate_index - current_partition_start_offset - frame_bound;
726  return index < 0 ? 0 : index + 1;
727 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_add ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound,
int64_t  num_current_partition_elem 
)

Definition at line 713 of file RuntimeFunctions.cpp.

716  {
717  int64_t index = candidate_index - current_partition_start_offset + frame_bound;
718  return index >= num_current_partition_elem ? num_current_partition_elem : index;
719 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_sub ( int64_t  candidate_index,
int64_t  current_partition_start_offset,
int64_t  frame_bound 
)

Definition at line 705 of file RuntimeFunctions.cpp.

707  {
708  int64_t index = candidate_index - current_partition_start_offset - frame_bound;
709  return index < 0 ? 0 : index;
710 }
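
Taken together, the four helpers above clamp ROWS-mode window frame bounds to the current partition. A standalone sketch for ROWS BETWEEN 1 PRECEDING AND 1 FOLLOWING, where the returned end index is exclusive:

#include <cstdint>
#include <cstdio>

int64_t compute_row_mode_start_index_sub(int64_t candidate_index,
                                         int64_t current_partition_start_offset,
                                         int64_t frame_bound) {
  int64_t index = candidate_index - current_partition_start_offset - frame_bound;
  return index < 0 ? 0 : index;
}

int64_t compute_row_mode_end_index_add(int64_t candidate_index,
                                       int64_t current_partition_start_offset,
                                       int64_t frame_bound,
                                       int64_t num_current_partition_elem) {
  int64_t index = candidate_index - current_partition_start_offset + frame_bound;
  return index >= num_current_partition_elem ? num_current_partition_elem
                                             : index + 1;
}

int main() {
  const int64_t partition_start = 100;  // global offset of the partition
  const int64_t row = 102;              // global index of the current row
  const int64_t num_elems = 5;          // rows in the partition
  std::printf(
      "[%lld, %lld)\n",
      static_cast<long long>(
          compute_row_mode_start_index_sub(row, partition_start, 1)),
      static_cast<long long>(
          compute_row_mode_end_index_add(row, partition_start, 1, num_elems)));
  // prints [1, 4): the previous, current, and next row
}
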
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil ( const int64_t  x,
const int64_t  scale 
)

Definition at line 1599 of file RuntimeFunctions.cpp.

References decimal_floor().

1600  {
1601  return decimal_floor(x, scale) + (x % scale ? scale : 0);
1602 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor ( const int64_t  x,
const int64_t  scale 
)

Definition at line 1588 of file RuntimeFunctions.cpp.

Referenced by decimal_ceil().

1589  {
1590  if (x >= 0) {
1591  return x / scale * scale;
1592  }
1593  if (!(x % scale)) {
1594  return x;
1595  }
1596  return x / scale * scale - scale;
1597 }
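
A worked sketch of decimal_floor and decimal_ceil on a fixed-point value at scale 100 (two fractional digits), rounding toward negative and positive infinity respectively:

#include <cstdint>
#include <cstdio>

int64_t decimal_floor(const int64_t x, const int64_t scale) {
  if (x >= 0) {
    return x / scale * scale;
  }
  if (!(x % scale)) {
    return x;
  }
  return x / scale * scale - scale;
}

int64_t decimal_ceil(const int64_t x, const int64_t scale) {
  return decimal_floor(x, scale) + (x % scale ? scale : 0);
}

int main() {
  // -1234 encodes -12.34 at scale 100
  std::printf("%lld %lld\n",
              static_cast<long long>(decimal_floor(-1234, 100)),  // -1300 (-13.00)
              static_cast<long long>(decimal_ceil(-1234, 100)));  // -1200 (-12.00)
}
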

GPU_RT_STUB int64_t* declare_dynamic_shared_memory ( )

Definition at line 1804 of file RuntimeFunctions.cpp.

1804  {
1805  return nullptr;
1806 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t encode_date ( int64_t  decoded_val,
int64_t  null_val,
int64_t  multiplier 
)

Definition at line 698 of file RuntimeFunctions.cpp.

700  {
701  return decoded_val == null_val ? decoded_val : decoded_val * multiplier;
702 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t extract_str_len ( const uint64_t  str_and_len)

Definition at line 2095 of file RuntimeFunctions.cpp.

Referenced by extract_str_len_noinline().

2095  {
2096  return static_cast<int64_t>(str_and_len) >> 48;
2097 }

RUNTIME_EXPORT NEVER_INLINE int32_t extract_str_len_noinline ( const uint64_t  str_and_len)

Definition at line 2105 of file RuntimeFunctions.cpp.

References extract_str_len().

Referenced by string_compress().

2105  {
2106  return extract_str_len(str_and_len);
2107 }

RUNTIME_EXPORT ALWAYS_INLINE int8_t* extract_str_ptr ( const uint64_t  str_and_len)

Definition at line 2089 of file RuntimeFunctions.cpp.

Referenced by extract_str_ptr_noinline().

2090  {
2091  return reinterpret_cast<int8_t*>(str_and_len & 0xffffffffffff);
2092 }

RUNTIME_EXPORT NEVER_INLINE int8_t* extract_str_ptr_noinline ( const uint64_t  str_and_len)

Definition at line 2099 of file RuntimeFunctions.cpp.

References extract_str_ptr().

Referenced by string_compress().

2100  {
2101  return extract_str_ptr(str_and_len);
2102 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs ( const int64_t  dividend,
const int64_t  divisor 
)

Definition at line 233 of file RuntimeFunctions.cpp.

Referenced by floor_div_nullable_lhs().

234  {
235  return (dividend < 0 ? dividend - (divisor - 1) : dividend) / divisor;
236 }
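
The point of this helper is floored (round-toward-negative-infinity) division, in contrast to C++'s truncating operator/; a quick standalone comparison:

#include <cstdint>
#include <cstdio>

int64_t floor_div_lhs(const int64_t dividend, const int64_t divisor) {
  return (dividend < 0 ? dividend - (divisor - 1) : dividend) / divisor;
}

int main() {
  std::printf("%lld %lld\n",
              static_cast<long long>(-7 / 2),                 // -3: truncation
              static_cast<long long>(floor_div_lhs(-7, 2)));  // -4: floor
}
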

RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs ( const int64_t  dividend,
const int64_t  divisor,
const int64_t  null_val 
)

Definition at line 241 of file RuntimeFunctions.cpp.

References floor_div_lhs().

243  {
244  return dividend == null_val ? null_val : floor_div_lhs(dividend, divisor);
245 }

GPU_RT_STUB void force_sync ( )

Definition at line 1768 of file RuntimeFunctions.cpp.

1768 {}
GPU_RT_STUB int64_t get_block_index ( )

Definition at line 1808 of file RuntimeFunctions.cpp.

1808  {
1809  return 0;
1810 }
RUNTIME_EXPORT ALWAYS_INLINE double* get_double_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 824 of file RuntimeFunctions.cpp.

826  {
827  double** casted_aggregation_trees = reinterpret_cast<double**>(aggregation_trees);
828  return casted_aggregation_trees[partition_idx];
829 }
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<double>* get_double_derived_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 839 of file RuntimeFunctions.cpp.

839  {
840  SumAndCountPair<double>** casted_aggregation_trees =
841  reinterpret_cast<SumAndCountPair<double>**>(aggregation_trees);
842  return casted_aggregation_trees[partition_idx];
843 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code ( int32_t *  error_codes)

Definition at line 1827 of file RuntimeFunctions.cpp.

References pos_start_impl().

1827  {
1828  return error_codes[pos_start_impl(nullptr)];
1829 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad 
)

Definition at line 2069 of file RuntimeFunctions.cpp.

2074  {
2075  return groups_buffer + row_size_quad * (key - min_key);
2076 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless_semiprivate ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad,
const uint8_t  thread_warp_idx,
const uint8_t  warp_size 
)

Definition at line 2078 of file RuntimeFunctions.cpp.

2085  {
2086  return groups_buffer + row_size_quad * (warp_size * (key - min_key) + thread_warp_idx);
2087 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_integer_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 818 of file RuntimeFunctions.cpp.

820  {
821  return aggregation_trees[partition_idx];
822 }
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<int64_t>* get_integer_derived_aggregation_tree ( int64_t **  aggregation_trees,
size_t  partition_idx 
)

Definition at line 832 of file RuntimeFunctions.cpp.

832  {
833  SumAndCountPair<int64_t>** casted_aggregation_trees =
834  reinterpret_cast<SumAndCountPair<int64_t>**>(aggregation_trees);
835  return casted_aggregation_trees[partition_idx];
836 }
template<typename T >
ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const T *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 1897 of file RuntimeFunctions.cpp.

References align_to_int64(), and heavydb.dtypes::T.

1901  {
1902  auto off = h * row_size_quad;
1903  auto row_ptr = reinterpret_cast<T*>(groups_buffer + off);
1904  if (*row_ptr == get_empty_key<T>()) {
1905  memcpy(row_ptr, key, key_count * sizeof(T));
1906  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1907  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1908  }
1909  if (memcmp(row_ptr, key, key_count * sizeof(T)) == 0) {
1910  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1911  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1912  }
1913  return nullptr;
1914 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width,
const uint32_t  row_size_quad 
)

Definition at line 1916 of file RuntimeFunctions.cpp.

References get_matching_group_value().

1922  {
1923  switch (key_width) {
1924  case 4:
1925  return get_matching_group_value(groups_buffer,
1926  h,
1927  reinterpret_cast<const int32_t*>(key),
1928  key_count,
1929  row_size_quad);
1930  case 8:
1931  return get_matching_group_value(groups_buffer, h, key, key_count, row_size_quad);
1932  default:;
1933  }
1934  return nullptr;
1935 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_columnar ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_qw_count,
const size_t  entry_count 
)

Definition at line 1985 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

1990  {
1991  auto off = h;
1992  if (groups_buffer[off] == EMPTY_KEY_64) {
1993  for (size_t i = 0; i < key_qw_count; ++i) {
1994  groups_buffer[off] = key[i];
1995  off += entry_count;
1996  }
1997  return &groups_buffer[off];
1998  }
1999  off = h;
2000  for (size_t i = 0; i < key_qw_count; ++i) {
2001  if (groups_buffer[off] != key[i]) {
2002  return nullptr;
2003  }
2004  off += entry_count;
2005  }
2006  return &groups_buffer[off];
2007 }
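
A standalone probe sketch against a tiny columnar buffer. EMPTY_KEY_64 is defined elsewhere in the codebase; the INT64_MAX stand-in below is an assumption. The first probe claims the bucket and returns its value slot, a matching re-probe returns the same slot, and a colliding key would return nullptr:

#include <cstdint>
#include <cstdio>

#define EMPTY_KEY_64 INT64_MAX  // assumed sentinel for this sketch

int64_t* get_matching_group_value_columnar(int64_t* groups_buffer,
                                           const uint32_t h,
                                           const int64_t* key,
                                           const uint32_t key_qw_count,
                                           const size_t entry_count) {
  auto off = static_cast<size_t>(h);
  if (groups_buffer[off] == EMPTY_KEY_64) {
    for (size_t i = 0; i < key_qw_count; ++i) {
      groups_buffer[off] = key[i];
      off += entry_count;
    }
    return &groups_buffer[off];
  }
  off = h;
  for (size_t i = 0; i < key_qw_count; ++i) {
    if (groups_buffer[off] != key[i]) {
      return nullptr;  // collision: a different key occupies this bucket
    }
    off += entry_count;
  }
  return &groups_buffer[off];
}

int main() {
  constexpr size_t entry_count = 4;  // buckets per column
  int64_t buf[3 * entry_count];      // two key columns + one value column
  for (auto& v : buf) {
    v = EMPTY_KEY_64;
  }
  const int64_t key[] = {7, 9};
  *get_matching_group_value_columnar(buf, 1, key, 2, entry_count) = 42;
  std::printf("%lld\n",
              static_cast<long long>(
                  *get_matching_group_value_columnar(buf, 1, key, 2, entry_count)));
  // prints 42: the re-probe lands on the same value slot
}
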
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const T *  key,
const uint32_t  key_count 
)

Definition at line 1938 of file RuntimeFunctions.cpp.

References heavydb.dtypes::T.

1942  {
1943  auto off = h;
1944  auto key_buffer = reinterpret_cast<T*>(groups_buffer);
1945  if (key_buffer[off] == get_empty_key<T>()) {
1946  for (size_t i = 0; i < key_count; ++i) {
1947  key_buffer[off] = key[i];
1948  off += entry_count;
1949  }
1950  return h;
1951  }
1952  off = h;
1953  for (size_t i = 0; i < key_count; ++i) {
1954  if (key_buffer[off] != key[i]) {
1955  return -1;
1956  }
1957  off += entry_count;
1958  }
1959  return h;
1960 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width 
)

Definition at line 1963 of file RuntimeFunctions.cpp.

References get_matching_group_value_columnar_slot().

1968  {
1969  switch (key_width) {
1970  case 4:
1971  return get_matching_group_value_columnar_slot(groups_buffer,
1972  entry_count,
1973  h,
1974  reinterpret_cast<const int32_t*>(key),
1975  key_count);
1976  case 8:
1977  return get_matching_group_value_columnar_slot(
1978  groups_buffer, entry_count, h, key, key_count);
1979  default:
1980  return -1;
1981  }
1982  return -1;
1983 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 2020 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

2025  {
2026  uint32_t off = hashed_index * row_size_quad;
2027  if (groups_buffer[off] == EMPTY_KEY_64) {
2028  for (uint32_t i = 0; i < key_count; ++i) {
2029  groups_buffer[off + i] = key[i];
2030  }
2031  }
2032  return groups_buffer + off + key_count;
2033 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash_keyless ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const uint32_t  row_size_quad 
)

For a particular hashed index (only used with multi-column perfect hash group by), it returns the row-wise offset of the group in the output buffer. Since it is intended for keyless hash use, it assumes there are no group columns prepended to the output buffer.

Definition at line 2042 of file RuntimeFunctions.cpp.

2044  {
2045  return groups_buffer + row_size_quad * hashed_index;
2046 }
GPU_RT_STUB int64_t get_thread_index ( )

Definition at line 1800 of file RuntimeFunctions.cpp.

1800  {
1801  return 0;
1802 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos ( const int64_t  num_elems,
const int64_t  null_start_pos,
const int64_t  null_end_pos 
)

Definition at line 479 of file RuntimeFunctions.cpp.

481  {
482  return null_end_pos == num_elems ? null_start_pos : num_elems;
483 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos ( const int64_t  null_start_pos,
const int64_t  null_end_pos 
)

Definition at line 474 of file RuntimeFunctions.cpp.

474  {
475  return null_start_pos == 0 ? null_end_pos + 1 : 0;
476 }
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal ( size_t  level,
size_t  tree_fanout 
)

Definition at line 846 of file RuntimeFunctions.cpp.

846  {
847  size_t offset = 0;
848  for (size_t i = 0; i < level; i++) {
849  offset += pow(tree_fanout, i);
850  }
851  return offset;
852 }
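
The loop accumulates a geometric series, so for fanout f and level L the returned offset equals (f^L - 1) / (f - 1); with fanout 2, level 3 starts at offset 7 (the 1 + 2 + 4 nodes on the levels above it).
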
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ( )

Definition at line 1788 of file RuntimeFunctions.cpp.

References pos_start_impl().

1788  {
1789  return pos_start_impl(nullptr);
1790 }

RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const bool  blocks_share_memory,
const int32_t  frag_idx 
)

Definition at line 1867 of file RuntimeFunctions.cpp.

1875  {
1876 #ifndef _WIN32
1877  // the body is not really needed, just make sure the call is not optimized away
1878  assert(groups_buffer);
1879 #endif
1880 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1853 of file RuntimeFunctions.cpp.

1860  {
1861 #ifndef _WIN32
1862  // the body is not really needed, just make sure the call is not optimized away
1863  assert(groups_buffer);
1864 #endif
1865 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1882 of file RuntimeFunctions.cpp.

1889  {
1890 #ifndef _WIN32
1891  // the body is not really needed, just make sure the call is not optimized away
1892  assert(groups_buffer);
1893 #endif
1894 }
RUNTIME_EXPORT int64_t* init_shared_mem ( const int64_t *  global_groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1848 of file RuntimeFunctions.cpp.

1849  {
1850  return nullptr;
1851 }
RUNTIME_EXPORT NEVER_INLINE const int64_t* init_shared_mem_nop ( const int64_t *  groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1833 of file RuntimeFunctions.cpp.

1835  {
1836  return groups_buffer;
1837 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded ( const int32_t  str_id)

Definition at line 2137 of file RuntimeFunctions.cpp.

2137  {
2138  return str_id;
2139 }
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count ( uint8_t *  bitmap,
const uint32_t  bitmap_bytes,
const uint8_t *  key_bytes,
const uint32_t  key_len 
)

Definition at line 2324 of file RuntimeFunctions.cpp.

References MurmurHash3().

2328  {
2329  const uint32_t bit_pos = MurmurHash3(key_bytes, key_len, 0) % (bitmap_bytes * 8);
2330  const uint32_t word_idx = bit_pos / 32;
2331  const uint32_t bit_idx = bit_pos % 32;
2332  reinterpret_cast<uint32_t*>(bitmap)[word_idx] |= 1 << bit_idx;
2333 }
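
A behavioral sketch with a hypothetical stand-in hash (the real function uses MurmurHash3 from MurmurHash.cpp): each distinct key sets at most one new bit, so the bitmap's population count approximates the number of distinct keys:

#include <bitset>
#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>

// Stand-in for MurmurHash3, for illustration only.
uint32_t toy_hash(const void* key, int len, uint32_t seed) {
  const auto* p = static_cast<const char*>(key);
  return static_cast<uint32_t>(std::hash<std::string>{}(std::string(p, len)) ^ seed);
}

void linear_probabilistic_count(uint8_t* bitmap, const uint32_t bitmap_bytes,
                                const uint8_t* key_bytes, const uint32_t key_len) {
  const uint32_t bit_pos = toy_hash(key_bytes, key_len, 0) % (bitmap_bytes * 8);
  reinterpret_cast<uint32_t*>(bitmap)[bit_pos / 32] |= 1u << (bit_pos % 32);
}

int main() {
  alignas(4) uint8_t bitmap[16] = {};
  const char* keys[] = {"a", "b", "a"};
  for (const char* k : keys) {
    linear_probabilistic_count(bitmap, sizeof(bitmap),
                               reinterpret_cast<const uint8_t*>(k), 1);
  }
  int bits = 0;
  for (uint8_t b : bitmap) {
    bits += std::bitset<8>(b).count();
  }
  std::printf("~%d distinct\n", bits);  // 2: the duplicate "a" sets the same bit
}
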

RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal ( const int64_t *  sum,
const int64_t *  count,
const double  null_val,
const uint32_t  scale 
)

Definition at line 2303 of file RuntimeFunctions.cpp.

2306  {
2307  return *count != 0 ? (static_cast<double>(*sum) / pow(10, scale)) / *count : null_val;
2308 }
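
A worked example of the decimal rescaling (std::pow stands in for the unqualified pow in the source): a DECIMAL sum of 123.45 stored as 12345 at scale 2, with count 2, averages to 61.725:

#include <cmath>
#include <cstdint>
#include <cstdio>

double load_avg_decimal(const int64_t* sum, const int64_t* count,
                        const double null_val, const uint32_t scale) {
  return *count != 0 ? (static_cast<double>(*sum) / std::pow(10, scale)) / *count
                     : null_val;
}

int main() {
  const int64_t sum = 12345, count = 2;
  std::printf("%.3f\n", load_avg_decimal(&sum, &count, -1.0, 2));  // 61.725
}
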
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double ( const int64_t *  agg,
const int64_t *  count,
const double  null_val 
)

Definition at line 2310 of file RuntimeFunctions.cpp.

2312  {
2313  return *count != 0 ? *reinterpret_cast<const double*>(may_alias_ptr(agg)) / *count
2314  : null_val;
2315 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float ( const int32_t *  agg,
const int32_t *  count,
const double  null_val 
)

Definition at line 2317 of file RuntimeFunctions.cpp.

2319  {
2320  return *count != 0 ? *reinterpret_cast<const float*>(may_alias_ptr(agg)) / *count
2321  : null_val;
2322 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int ( const int64_t *  sum,
const int64_t *  count,
const double  null_val 
)

Definition at line 2297 of file RuntimeFunctions.cpp.

2299  {
2300  return *count != 0 ? static_cast<double>(*sum) / *count : null_val;
2301 }
RUNTIME_EXPORT ALWAYS_INLINE double load_double ( const int64_t *  agg)

Definition at line 2289 of file RuntimeFunctions.cpp.

2289  {
2290  return *reinterpret_cast<const double*>(may_alias_ptr(agg));
2291 }
RUNTIME_EXPORT ALWAYS_INLINE float load_float ( const int32_t *  agg)

Definition at line 2293 of file RuntimeFunctions.cpp.

2293  {
2294  return *reinterpret_cast<const float*>(may_alias_ptr(agg));
2295 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 336 of file RuntimeFunctions.cpp.

338  {
339  if (lhs == null_val) {
340  return rhs == 0 ? rhs : null_val;
341  }
342  if (rhs == null_val) {
343  return lhs == 0 ? lhs : null_val;
344  }
345  return (lhs && rhs) ? 1 : 0;
346 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not ( const int8_t  operand,
const int8_t  null_val 
)

Definition at line 331 of file RuntimeFunctions.cpp.

332  {
333  return operand == null_val ? operand : (operand ? 0 : 1);
334 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 348 of file RuntimeFunctions.cpp.

350  {
351  if (lhs == null_val) {
352  return rhs == 0 ? null_val : rhs;
353  }
354  if (rhs == null_val) {
355  return lhs == 0 ? null_val : lhs;
356  }
357  return (lhs || rhs) ? 1 : 0;
358 }
ALWAYS_INLINE DEVICE int32_t map_string_dict_id ( const int32_t  string_id,
const int64_t  translation_map_handle,
const int32_t  min_source_id 
)

Definition at line 2142 of file RuntimeFunctions.cpp.

2144  {
2145  const int32_t* translation_map =
2146  reinterpret_cast<const int32_t*>(translation_map_handle);
2147  return translation_map[string_id - min_source_id];
2148 }
RUNTIME_EXPORT void multifrag_query ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables 
)

Definition at line 2398 of file RuntimeFunctions.cpp.

References query_stub().

2408  {
2409  for (uint32_t i = 0; i < *num_fragments; ++i) {
2410  query_stub(col_buffers ? col_buffers[i] : nullptr,
2411  &num_rows[i * (*num_tables_ptr)],
2412  &frag_row_offsets[i * (*num_tables_ptr)],
2413  max_matched,
2414  init_agg_value,
2415  out,
2416  i,
2417  join_hash_tables,
2418  total_matched,
2419  error_code);
2420  }
2421 }

RUNTIME_EXPORT void multifrag_query_hoisted_literals ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables 
)

Definition at line 2354 of file RuntimeFunctions.cpp.

References query_stub_hoisted_literals().

2366  {
2367  for (uint32_t i = 0; i < *num_fragments; ++i) {
2368  query_stub_hoisted_literals(col_buffers ? col_buffers[i] : nullptr,
2369  literals,
2370  &num_rows[i * (*num_tables_ptr)],
2371  &frag_row_offsets[i * (*num_tables_ptr)],
2372  max_matched,
2373  init_agg_value,
2374  out,
2375  i,
2376  join_hash_tables,
2377  total_matched,
2378  error_code);
2379  }
2380 }

RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2283 of file RuntimeFunctions.cpp.

2285  {
2286  return reinterpret_cast<const double*>(output_buff)[pos];
2287 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl ( int32_t *  error_code)

Definition at line 1779 of file RuntimeFunctions.cpp.

1779  {
1780  int32_t row_index_resume{0};
1781  if (error_code) {
1782  row_index_resume = error_code[0];
1783  error_code[0] = 0;
1784  }
1785  return row_index_resume;
1786 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ( )

Definition at line 1792 of file RuntimeFunctions.cpp.

1792  {
1793  return 1;
1794 }
RUNTIME_EXPORT NEVER_INLINE void query_stub ( const int8_t **  col_buffers,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched 
)

Definition at line 2382 of file RuntimeFunctions.cpp.

Referenced by multifrag_query().

2391  {
2392 #ifndef _WIN32
2393  assert(col_buffers || num_rows || frag_row_offsets || max_matched || init_agg_value ||
2394  out || frag_idx || error_code || join_hash_tables || total_matched);
2395 #endif
2396 }

RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals ( const int8_t **  col_buffers,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched 
)

Definition at line 2335 of file RuntimeFunctions.cpp.

Referenced by multifrag_query_hoisted_literals().

2346  {
2347 #ifndef _WIN32
2348  assert(col_buffers || literals || num_rows || frag_row_offsets || max_matched ||
2349  init_agg_value || out || frag_idx || error_code || join_hash_tables ||
2350  total_matched);
2351 #endif
2352 }

RUNTIME_EXPORT ALWAYS_INLINE void record_error_code ( const int32_t  err_code,
int32_t *  error_codes 
)

Definition at line 1814 of file RuntimeFunctions.cpp.

References pos_start_impl().

1815  {
1816  // NB: never override persistent error codes (with code greater than zero).
1817  // On GPU, a projection query with a limit can run out of slots without it
1818  // being an actual error if the limit has been hit. If a persistent error
1819  // (division by zero, for example) occurs before running out of slots, we
1820  // have to avoid overriding it, because there's a risk that the query would
1821  // go through if we override with a potentially benign out-of-slots code.
1822  if (err_code && error_codes[pos_start_impl(nullptr)] <= 0) {
1823  error_codes[pos_start_impl(nullptr)] = err_code;
1824  }
1825 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2279 of file RuntimeFunctions.cpp.

2279  {
2280  return reinterpret_cast<const int64_t*>(output_buff)[pos];
2281 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio ( const double  proportion,
const int64_t  row_offset 
)

Definition at line 2150 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit().

2152  {
2153  const int64_t threshold = 4294967296 * proportion;
2154  return (row_offset * 2654435761) % 4294967296 < threshold;
2155 }
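
A standalone sketch of the sampling behavior: 2654435761 is Knuth's multiplicative-hash constant, so accepted row offsets are spread roughly uniformly and the observed hit rate tracks the requested proportion:

#include <cstdint>
#include <cstdio>

bool sample_ratio(const double proportion, const int64_t row_offset) {
  const int64_t threshold = 4294967296 * proportion;
  return (row_offset * 2654435761) % 4294967296 < threshold;
}

int main() {
  int hits = 0;
  for (int64_t row = 0; row < 100000; ++row) {
    hits += sample_ratio(0.1, row);
  }
  std::printf("%.3f\n", hits / 100000.0);  // close to the requested 0.1
}
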

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 223 of file RuntimeFunctions.cpp.

225  {
226  int64_t tmp = scale >> 1;
227  tmp = operand >= 0 ? operand + tmp : operand - tmp;
228  return tmp / scale;
229 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 209 of file RuntimeFunctions.cpp.

211  {
212  // rounded scale down of a decimal
213  if (operand == null_val) {
214  return null_val;
215  }
216 
217  int64_t tmp = scale >> 1;
218  tmp = operand >= 0 ? operand + tmp : operand - tmp;
219  return tmp / scale;
220 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up ( const int64_t  operand,
const uint64_t  scale,
const int64_t  operand_null_val,
const int64_t  result_null_val 
)

Definition at line 201 of file RuntimeFunctions.cpp.

204  {
205  return operand != operand_null_val ? operand * scale : result_null_val;
206 }
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  entry_count 
)

Definition at line 2053 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

2057  {
2058  if (groups_buffer[hashed_index] == EMPTY_KEY_64) {
2059  for (uint32_t i = 0; i < key_count; i++) {
2060  groups_buffer[i * entry_count + hashed_index] = key[i];
2061  }
2062  }
2063 }
RUNTIME_EXPORT ALWAYS_INLINE uint64_t string_pack ( const int8_t *  ptr,
const int32_t  len 
)

Definition at line 2109 of file RuntimeFunctions.cpp.

2110  {
2111  return (reinterpret_cast<const uint64_t>(ptr) & 0xffffffffffff) |
2112  (static_cast<const uint64_t>(len) << 48);
2113 }
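
A round-trip sketch of the packed-string encoding: the low 48 bits carry the pointer and the high 16 bits the length. extract_str_ptr and extract_str_len from above are copied in so the snippet runs standalone:

#include <cstdint>
#include <cstdio>

uint64_t string_pack(const int8_t* ptr, const int32_t len) {
  return (reinterpret_cast<uint64_t>(ptr) & 0xffffffffffff) |
         (static_cast<uint64_t>(len) << 48);
}

int8_t* extract_str_ptr(const uint64_t str_and_len) {
  return reinterpret_cast<int8_t*>(str_and_len & 0xffffffffffff);
}

int32_t extract_str_len(const uint64_t str_and_len) {
  return static_cast<int64_t>(str_and_len) >> 48;
}

int main() {
  const char* s = "hello";
  const uint64_t packed = string_pack(reinterpret_cast<const int8_t*>(s), 5);
  std::printf("%.*s\n", extract_str_len(packed),
              reinterpret_cast<const char*>(extract_str_ptr(packed)));  // hello
}
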
GPU_RT_STUB void sync_threadblock ( )

Definition at line 1772 of file RuntimeFunctions.cpp.

1772 {}
GPU_RT_STUB void sync_warp ( )

Definition at line 1770 of file RuntimeFunctions.cpp.

1770 {}
GPU_RT_STUB void sync_warp_protected ( int64_t  thread_pos,
int64_t  row_count 
)

Definition at line 1771 of file RuntimeFunctions.cpp.

1771 {}
GPU_RT_STUB int8_t thread_warp_idx ( const int8_t  warp_sz)

Definition at line 1796 of file RuntimeFunctions.cpp.

1796  {
1797  return 0;
1798 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2158 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit(), width_bucket_expr(), and width_bucket_nullable().

2162  {
2163  if (target_value < lower_bound) {
2164  return 0;
2165  } else if (target_value >= upper_bound) {
2166  return partition_count + 1;
2167  }
2168  return ((target_value - lower_bound) * scale_factor) + 1;
2169 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2231 of file RuntimeFunctions.cpp.

References width_bucket(), and width_bucket_reversed().

Referenced by CodeGenerator::codegen(), getExpressionRange(), ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit(), and width_bucket_expr_nullable().

2235  {
2236  if (reversed) {
2237  return width_bucket_reversed(target_value,
2238  lower_bound,
2239  upper_bound,
2240  partition_count / (lower_bound - upper_bound),
2241  partition_count);
2242  }
2243  return width_bucket(target_value,
2244  lower_bound,
2245  upper_bound,
2246  partition_count / (upper_bound - lower_bound),
2247  partition_count);
2248 }
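
A worked sketch of the bucket arithmetic over five equal buckets on [0, 10): values below the range map to bucket 0, values at or above its upper bound to partition_count + 1, and interior values scale linearly. The scale factor is exactly what width_bucket_expr passes down:

#include <cstdint>
#include <cstdio>

int32_t width_bucket(const double target_value, const double lower_bound,
                     const double upper_bound, const double scale_factor,
                     const int32_t partition_count) {
  if (target_value < lower_bound) {
    return 0;
  } else if (target_value >= upper_bound) {
    return partition_count + 1;
  }
  return ((target_value - lower_bound) * scale_factor) + 1;
}

int main() {
  const double lb = 0.0, ub = 10.0;
  const int32_t n = 5;
  const double scale = n / (ub - lb);  // what width_bucket_expr computes
  std::printf("%d %d %d\n",
              width_bucket(-1.0, lb, ub, scale, n),   // 0: below range
              width_bucket(2.5, lb, ub, scale, n),    // 2
              width_bucket(10.0, lb, ub, scale, n));  // 6: above range
}
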

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2265 of file RuntimeFunctions.cpp.

References width_bucket_no_oob_check(), and width_bucket_reversed_no_oob_check().

2269  {
2270  if (reversed) {
2271  return width_bucket_reversed_no_oob_check(
2272  target_value, lower_bound, partition_count / (lower_bound - upper_bound));
2273  }
2274  return width_bucket_no_oob_check(
2275  target_value, lower_bound, partition_count / (upper_bound - lower_bound));
2276 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2251 of file RuntimeFunctions.cpp.

References width_bucket_expr().

2256  {
2257  if (target_value == null_val) {
2258  return INT32_MIN;
2259  }
2260  return width_bucket_expr(
2261  target_value, reversed, lower_bound, upper_bound, partition_count);
2262 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2217 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

2219  {
2220  return ((target_value - lower_bound) * scale_factor) + 1;
2221 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2186 of file RuntimeFunctions.cpp.

References width_bucket().

2191  {
2192  if (target_value == null_val) {
2193  return INT32_MIN;
2194  }
2195  return width_bucket(
2196  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2197 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2172 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr(), and width_bucket_reversed_nullable().

2176  {
2177  if (target_value > lower_bound) {
2178  return 0;
2179  } else if (target_value <= upper_bound) {
2180  return partition_count + 1;
2181  }
2182  return ((lower_bound - target_value) * scale_factor) + 1;
2183 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2224 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

2226  {
2227  return ((lower_bound - target_value) * scale_factor) + 1;
2228 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2200 of file RuntimeFunctions.cpp.

References width_bucket_reversed().

2205  {
2206  if (target_value == null_val) {
2207  return INT32_MIN;
2208  }
2209  return width_bucket_reversed(
2210  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2211 }

GPU_RT_STUB void write_back_non_grouped_agg ( int64_t *  input_buffer,
int64_t *  output_buffer,
const int32_t  num_agg_cols 
)

Definition at line 1774 of file RuntimeFunctions.cpp.

1776  {};
RUNTIME_EXPORT NEVER_INLINE void write_back_nop ( int64_t *  dest,
int64_t *  src,
const int32_t  sz 
)

Definition at line 1839 of file RuntimeFunctions.cpp.

1841  {
1842 #ifndef _WIN32
1843  // the body is not really needed, just make sure the call is not optimized away
1844  assert(dest);
1845 #endif
1846 }