OmniSciDB  085a039ca4
RuntimeFunctions.cpp File Reference
#include "RuntimeFunctions.h"
#include "../Shared/funcannotations.h"
#include "BufferCompaction.h"
#include "HyperLogLogRank.h"
#include "MurmurHash.h"
#include "Shared/quantile.h"
#include "TypePunning.h"
#include <algorithm>
#include <atomic>
#include <chrono>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <thread>
#include <tuple>
#include "DecodersImpl.h"
#include "GeoOpsRuntime.cpp"
#include "GroupByRuntime.cpp"
#include "JoinHashTable/Runtime/JoinHashTableQueryRuntime.cpp"
#include "TopKRuntime.cpp"

Macros

#define DEF_ARITH_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_SAFE_DIV_NULLABLE(type, null_type, opname)
 
#define DEF_BINARY_NULLABLE_ALL_OPS(type, null_type)
 
#define DEF_UMINUS_NULLABLE(type, null_type)
 
#define DEF_CAST_NULLABLE(from_type, to_type)
 
#define DEF_CAST_SCALED_NULLABLE(from_type, to_type)
 
#define DEF_CAST_NULLABLE_BIDIR(type1, type2)
 
#define DEF_ROUND_NULLABLE(from_type, to_type)
 
#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))
 
#define DEF_AGG_MAX_INT(n)
 
#define DEF_AGG_MIN_INT(n)
 
#define DEF_AGG_ID_INT(n)
 
#define DEF_CHECKED_SINGLE_AGG_ID_INT(n)
 
#define DEF_WRITE_PROJECTION_INT(n)
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   int64_t
 
#define DATA_T   int32_t
 
#define DATA_T   int16_t
 
#define DATA_T   int8_t
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   double
 
#define ADDR_T   int64_t
 
#define DATA_T   float
 
#define ADDR_T   int32_t
 
#define DEF_SHARED_AGG_RET_STUBS(base_agg_func)
 
#define DEF_SHARED_AGG_STUBS(base_agg_func)
 

Functions

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up (const int64_t operand, const uint64_t scale, const int64_t operand_null_val, const int64_t result_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs (const int64_t dividend, const int64_t divisor)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs (const int64_t dividend, const int64_t divisor, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not (const int8_t operand, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count (uint64_t *agg, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap (int64_t *agg, const int64_t val, const int64_t min_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct (int64_t *agg, const int64_t key, const uint32_t b)
 
GPU_RT_STUB void agg_approximate_count_distinct_gpu (int64_t *, const int64_t, const uint32_t, const int64_t, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set (const int64_t bitset, const int64_t val, const int64_t min_val, const int64_t max_val, const int64_t null_val, const int8_t null_bool_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t * agg_id_varlen (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id (int64_t *agg, const int64_t val, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val (int64_t *agg, const int64_t val, const int64_t min_val, const int64_t skip_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 (uint32_t *agg, const int32_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 (int32_t *agg, const int32_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val (uint64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val (uint32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double (uint64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double (int64_t *agg, const double val, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float (uint32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float (int32_t *agg, const float val, const float null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val (uint64_t *agg, const double val, const double skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val (uint32_t *agg, const float val, const float skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor (const int64_t x, const int64_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil (const int64_t x, const int64_t scale)
 
GPU_RT_STUB int8_t * agg_id_varlen_shared (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
GPU_RT_STUB int32_t checked_single_agg_id_shared (int64_t *agg, const int64_t val, const int64_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared (int32_t *agg, const int32_t val, const int32_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared (int16_t *agg, const int16_t val, const int16_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared (int8_t *agg, const int8_t val, const int8_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_double_shared (int64_t *agg, const double val, const double null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_float_shared (int32_t *agg, const float val, const float null_val)
 
GPU_RT_STUB void agg_max_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_max_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_min_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_min_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_id_double_shared_slow (int64_t *agg, const double *val)
 
GPU_RT_STUB int64_t agg_sum_shared (int64_t *agg, const int64_t val)
 
GPU_RT_STUB int64_t agg_sum_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val)
 
GPU_RT_STUB int32_t agg_sum_int32_shared (int32_t *agg, const int32_t val)
 
GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val)
 
GPU_RT_STUB void agg_sum_double_shared (int64_t *agg, const double val)
 
GPU_RT_STUB void agg_sum_double_skip_val_shared (int64_t *agg, const double val, const double skip_val)
 
GPU_RT_STUB void agg_sum_float_shared (int32_t *agg, const float val)
 
GPU_RT_STUB void agg_sum_float_skip_val_shared (int32_t *agg, const float val, const float skip_val)
 
GPU_RT_STUB void force_sync ()
 
GPU_RT_STUB void sync_warp ()
 
GPU_RT_STUB void sync_warp_protected (int64_t thread_pos, int64_t row_count)
 
GPU_RT_STUB void sync_threadblock ()
 
GPU_RT_STUB void write_back_non_grouped_agg (int64_t *input_buffer, int64_t *output_buffer, const int32_t num_agg_cols)
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl (int32_t *error_code)
 
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ()
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ()
 
GPU_RT_STUB int8_t thread_warp_idx (const int8_t warp_sz)
 
GPU_RT_STUB int64_t get_thread_index ()
 
GPU_RT_STUB int64_t * declare_dynamic_shared_memory ()
 
GPU_RT_STUB int64_t get_block_index ()
 
RUNTIME_EXPORT ALWAYS_INLINE void record_error_code (const int32_t err_code, int32_t *error_codes)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code (int32_t *error_codes)
 
RUNTIME_EXPORT NEVER_INLINE const int64_t * init_shared_mem_nop (const int64_t *groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void write_back_nop (int64_t *dest, int64_t *src, const int32_t sz)
 
RUNTIME_EXPORT int64_t * init_shared_mem (const int64_t *global_groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const bool blocks_share_memory, const int32_t frag_idx)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
template<typename T >
ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const T *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width, const uint32_t row_size_quad)
 
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const T *key, const uint32_t key_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_columnar (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_qw_count, const size_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_matching_group_value_perfect_hash_keyless (int64_t *groups_buffer, const uint32_t hashed_index, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t * get_group_value_fast_keyless_semiprivate (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad, const uint8_t thread_warp_idx, const uint8_t warp_size)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t * extract_str_ptr (const uint64_t str_and_len)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t extract_str_len (const uint64_t str_and_len)
 
RUNTIME_EXPORT NEVER_INLINE int8_t * extract_str_ptr_noinline (const uint64_t str_and_len)
 
RUNTIME_EXPORT NEVER_INLINE int32_t extract_str_len_noinline (const uint64_t str_and_len)
 
RUNTIME_EXPORT ALWAYS_INLINE uint64_t string_pack (const int8_t *ptr, const int32_t len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length (const char *str, const int32_t str_len)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable (const char *str, const int32_t str_len, const int32_t int_null)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded (const int32_t str_id)
 
ALWAYS_INLINE DEVICE int32_t map_string_dict_id (const int32_t string_id, const int64_t translation_map_handle, const int32_t min_source_id)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio (const double proportion, const int64_t row_offset)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_double (const int64_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE float load_float (const int32_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int (const int64_t *sum, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal (const int64_t *sum, const int64_t *count, const double null_val, const uint32_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double (const int64_t *agg, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float (const int32_t *agg, const int32_t *count, const double null_val)
 
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count (uint8_t *bitmap, const uint32_t bitmap_bytes, const uint8_t *key_bytes, const uint32_t key_len)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals (const int8_t **col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched)
 
RUNTIME_EXPORT void multifrag_query_hoisted_literals (const int8_t ***col_buffers, const uint64_t *num_fragments, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub (const int8_t **col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, int64_t **out, uint32_t frag_idx, const int64_t *join_hash_tables, int32_t *error_code, int32_t *total_matched)
 
RUNTIME_EXPORT void multifrag_query (const int8_t ***col_buffers, const uint64_t *num_fragments, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, int32_t *total_matched, const int64_t *init_agg_value, int64_t **out, int32_t *error_code, const uint32_t *num_tables_ptr, const int64_t *join_hash_tables)
 
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ()
 
RUNTIME_EXPORT bool check_interrupt_init (unsigned command)
 

Macro Definition Documentation

#define ADDR_T   int64_t

Definition at line 768 of file RuntimeFunctions.cpp.

#define ADDR_T   int32_t

Definition at line 768 of file RuntimeFunctions.cpp.

#define DATA_T   int64_t

Definition at line 767 of file RuntimeFunctions.cpp.

#define DATA_T   int32_t

Definition at line 767 of file RuntimeFunctions.cpp.

#define DATA_T   int16_t

Definition at line 767 of file RuntimeFunctions.cpp.

#define DATA_T   int8_t

Definition at line 767 of file RuntimeFunctions.cpp.

#define DATA_T   double

Definition at line 767 of file RuntimeFunctions.cpp.

#define DATA_T   float

Definition at line 767 of file RuntimeFunctions.cpp.

#define DEF_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_id_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = val; \
}

Definition at line 498 of file RuntimeFunctions.cpp.

#define DEF_AGG_MAX_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_max_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::max(*agg, val); \
}

Definition at line 476 of file RuntimeFunctions.cpp.

#define DEF_AGG_MIN_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_min_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::min(*agg, val); \
}

Definition at line 487 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 44 of file RuntimeFunctions.cpp.
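
To make the expansion concrete: DEF_ARITH_NULLABLE(int32_t, int64_t, add, +) generates a function equivalent to the sketch below (RUNTIME_EXPORT and ALWAYS_INLINE elided; the NULL sentinel in main() is hypothetical — real instantiations are emitted through DEF_BINARY_NULLABLE_ALL_OPS):

#include <cassert>
#include <cstdint>

extern "C" int32_t add_int32_t_nullable(const int32_t lhs,
                                        const int32_t rhs,
                                        const int64_t null_val) {
  if (lhs != null_val && rhs != null_val) {
    return lhs + rhs;  // both operands non-NULL: apply the operator
  }
  return null_val;  // any NULL operand propagates
}

int main() {
  const int64_t null_sentinel = INT32_MIN;  // hypothetical NULL sentinel
  assert(add_int32_t_nullable(2, 3, null_sentinel) == 5);
  assert(add_int32_t_nullable(2, INT32_MIN, null_sentinel) == INT32_MIN);
}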

#define DEF_ARITH_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_lhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 53 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_rhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}

Definition at line 62 of file RuntimeFunctions.cpp.

#define DEF_BINARY_NULLABLE_ALL_OPS (   type,
  null_type 
)

Definition at line 116 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val ? to_null_val : operand; \
}

Definition at line 239 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE_BIDIR (   type1,
  type2 
)
Value:
DEF_CAST_NULLABLE(type1, type2) \
DEF_CAST_NULLABLE(type2, type1)

Definition at line 256 of file RuntimeFunctions.cpp.

#define DEF_CAST_SCALED_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_scaled_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val, \
const to_type multiplier) { \
return operand == from_null_val ? to_null_val : multiplier * operand; \
}

Definition at line 247 of file RuntimeFunctions.cpp.

#define DEF_CHECKED_SINGLE_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_int##n( \
int##n##_t* agg, const int##n##_t val, const int##n##_t null_val) { \
if (val == null_val) { \
return 0; \
} \
if (*agg == val) { \
return 0; \
} else if (*agg == null_val) { \
*agg = val; \
return 0; \
} else { \
/* see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES*/ \
return 15; \
} \
}

Definition at line 504 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 71 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_lhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 83 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_rhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}

Definition at line 95 of file RuntimeFunctions.cpp.

#define DEF_ROUND_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val \
? to_null_val \
: static_cast<to_type>(operand + (operand < from_type(0) \
? from_type(-0.5) \
: from_type(0.5))); \
}

Definition at line 260 of file RuntimeFunctions.cpp.
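
The generated casts round half away from zero by biasing with ±0.5 before truncation. A runnable check of the (double, int64_t) expansion, with hypothetical NULL sentinels:

#include <cassert>
#include <cstdint>

extern "C" int64_t cast_double_to_int64_t_nullable(const double operand,
                                                   const double from_null_val,
                                                   const int64_t to_null_val) {
  return operand == from_null_val
             ? to_null_val
             : static_cast<int64_t>(operand + (operand < 0.0 ? -0.5 : 0.5));
}

int main() {
  const double d_null = -1.7e308;    // hypothetical NULL sentinel
  const int64_t i_null = INT64_MIN;  // hypothetical NULL sentinel
  assert(cast_double_to_int64_t_nullable(2.5, d_null, i_null) == 3);
  assert(cast_double_to_int64_t_nullable(-2.5, d_null, i_null) == -3);
  assert(cast_double_to_int64_t_nullable(d_null, d_null, i_null) == i_null);
}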

#define DEF_SAFE_DIV_NULLABLE (   type,
  null_type,
  opname 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type safe_div_##type( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val && rhs != 0) { \
return lhs / rhs; \
} \
return null_val; \
}

Definition at line 107 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_RET_STUBS (   base_agg_func)

Definition at line 796 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_STUBS (   base_agg_func)
Value:
extern "C" GPU_RT_STUB void base_agg_func##_shared(int64_t* agg, const int64_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_skip_val_shared( \
int64_t* agg, const int64_t val, const int64_t skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int32_shared(int32_t* agg, \
const int32_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int16_shared(int16_t* agg, \
const int16_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int8_shared(int8_t* agg, \
const int8_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_int32_skip_val_shared( \
int32_t* agg, const int32_t val, const int32_t skip_val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_shared(int64_t* agg, \
const double val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_skip_val_shared( \
int64_t* agg, const double val, const double skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_float_shared(int32_t* agg, \
const float val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_float_skip_val_shared( \
int32_t* agg, const float val, const float skip_val) {}

Definition at line 835 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const DATA_T old_agg = *agg; \
if (old_agg != skip_val) { \
base_agg_func(agg, val); \
} else { \
*agg = val; \
} \
} \
}

Definition at line 746 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const ADDR_T old_agg = *agg; \
if (old_agg != *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&skip_val))) { \
base_agg_func(agg, val); \
} else { \
*agg = *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&val)); \
} \
} \
}

Definition at line 746 of file RuntimeFunctions.cpp.
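
This second DEF_SKIP_AGG variant serves the float/double aggregates, whose accumulator slot is an integer word (ADDR_T): the skip comparison must be made on bit patterns, hence the may_alias_ptr reinterpretation. A minimal sketch of the same idea for the double case, using std::memcpy in place of may_alias_ptr and a hypothetical NULL sentinel:

#include <cassert>
#include <cstdint>
#include <cstring>

// Bit-cast helper standing in for may_alias_ptr (see TypePunning.h).
static int64_t double_bits(const double d) {
  int64_t bits;
  std::memcpy(&bits, &d, sizeof(bits));
  return bits;
}

// Sketch of agg_sum_double_skip_val: the int64_t* slot holds double bits.
void agg_sum_double_skip_val_sketch(int64_t* agg,
                                    const double val,
                                    const double skip_val) {
  if (val != skip_val) {
    if (*agg != double_bits(skip_val)) {
      double acc;
      std::memcpy(&acc, agg, sizeof(acc));
      acc += val;                // normal accumulate
      *agg = double_bits(acc);
    } else {
      *agg = double_bits(val);   // first non-NULL value replaces sentinel
    }
  }
}

int main() {
  const double null_sentinel = -1.0e300;  // hypothetical NULL sentinel
  int64_t slot = double_bits(null_sentinel);
  agg_sum_double_skip_val_sketch(&slot, 1.5, null_sentinel);
  agg_sum_double_skip_val_sketch(&slot, 2.5, null_sentinel);
  double result;
  std::memcpy(&result, &slot, sizeof(result));
  assert(result == 4.0);
}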

#define DEF_SKIP_AGG_ADD (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
base_agg_func(agg, val); \
} \
}

Definition at line 738 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG_ADD (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
base_agg_func(agg, val); \
} \
}

Definition at line 738 of file RuntimeFunctions.cpp.

#define DEF_UMINUS_NULLABLE (   type,
  null_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type uminus_##type##_nullable( \
const type operand, const null_type null_val) { \
return operand == null_val ? null_val : -operand; \
}

Definition at line 224 of file RuntimeFunctions.cpp.

#define DEF_WRITE_PROJECTION_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void write_projection_int##n( \
int8_t* slot_ptr, const int##n##_t val, const int64_t init_val) { \
if (val != init_val) { \
*reinterpret_cast<int##n##_t*>(slot_ptr) = val; \
} \
}

Definition at line 532 of file RuntimeFunctions.cpp.

#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))

Definition at line 352 of file RuntimeFunctions.cpp.

Function Documentation

RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct ( int64_t *  agg,
const int64_t  key,
const uint32_t  b 
)

Definition at line 364 of file RuntimeFunctions.cpp.

References get_rank(), and MurmurHash64A().

364  {
365  const uint64_t hash = MurmurHash64A(&key, sizeof(key), 0);
366  const uint32_t index = hash >> (64 - b);
367  const uint8_t rank = get_rank(hash << b, 64 - b);
368  uint8_t* M = reinterpret_cast<uint8_t*>(*agg);
369  M[index] = std::max(M[index], rank);
370 }
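
This is the HyperLogLog update step: the top b bits of the 64-bit MurmurHash select one of 2^b registers, and each register keeps the maximum rank (position of the first set bit) seen for the remaining bits. For orientation only, a sketch of the standard HyperLogLog estimator that such registers feed into (the actual rank/estimate code lives in HyperLogLogRank.h and the reduction layer, and may apply additional bias corrections):

#include <cmath>
#include <cstdint>
#include <vector>

// Standard HyperLogLog estimate over m = 2^b registers; the small- and
// large-range bias corrections are omitted for brevity.
double hll_estimate_sketch(const std::vector<uint8_t>& M) {
  const double m = static_cast<double>(M.size());
  double inv_sum = 0.0;
  for (const uint8_t rank : M) {
    inv_sum += std::ldexp(1.0, -static_cast<int>(rank));  // adds 2^-rank
  }
  const double alpha_m = 0.7213 / (1.0 + 1.079 / m);  // constant for m >= 128
  return alpha_m * m * m / inv_sum;
}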

GPU_RT_STUB void agg_approximate_count_distinct_gpu ( int64_t *  ,
const int64_t  ,
const uint32_t  ,
const int64_t  ,
const int64_t   
)

Definition at line 372 of file RuntimeFunctions.cpp.

376  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count ( uint64_t *  agg,
const int64_t   
)

Definition at line 339 of file RuntimeFunctions.cpp.

Referenced by agg_count_skip_val(), and anonymous_namespace{GroupByAndAggregate.cpp}::get_agg_count().

339  {
340  return (*agg)++;
341 }


RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val 
)

Definition at line 344 of file RuntimeFunctions.cpp.

Referenced by agg_count_distinct_bitmap_skip_val(), WindowFunctionContext::fillPartitionEnd(), WindowFunctionContext::fillPartitionStart(), anonymous_namespace{WindowContext.cpp}::index_to_partition_end(), and InValuesBitmap::InValuesBitmap().

344  {
345  const uint64_t bitmap_idx = val - min_val;
346  reinterpret_cast<int8_t*>(*agg)[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));
347 }


GPU_RT_STUB void agg_count_distinct_bitmap_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 355 of file RuntimeFunctions.cpp.

361  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val,
const int64_t  skip_val 
)

Definition at line 445 of file RuntimeFunctions.cpp.

References agg_count_distinct_bitmap().

449  {
450  if (val != skip_val) {
451  agg_count_distinct_bitmap(agg, val, min_val);
452  }
453 }

GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 455 of file RuntimeFunctions.cpp.

462  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double ( uint64_t *  agg,
const double  val 
)

Definition at line 633 of file RuntimeFunctions.cpp.

Referenced by agg_count_double_skip_val().

634  {
635  return (*agg)++;
636 }


RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val ( uint64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 723 of file RuntimeFunctions.cpp.

References agg_count_double().

723  {
724  if (val != skip_val) {
725  return agg_count_double(agg, val);
726  }
727  return *agg;
728 }

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float ( uint32_t *  agg,
const float  val 
)

Definition at line 678 of file RuntimeFunctions.cpp.

Referenced by agg_count_float_skip_val().

679  {
680  return (*agg)++;
681 }


RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val ( uint32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 731 of file RuntimeFunctions.cpp.

References agg_count_float().

731  {
732  if (val != skip_val) {
733  return agg_count_float(agg, val);
734  }
735  return *agg;
736 }

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 ( uint32_t *  agg,
const int32_t   
)

Definition at line 464 of file RuntimeFunctions.cpp.

Referenced by agg_count_int32_skip_val().

465  {
466  return (*agg)++;
467 }


RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val ( uint32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 580 of file RuntimeFunctions.cpp.

References agg_count_int32().

580  {
581  if (val != skip_val) {
582  return agg_count_int32(agg, val);
583  }
584  return *agg;
585 }

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val ( uint64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 572 of file RuntimeFunctions.cpp.

References agg_count().

572  {
573  if (val != skip_val) {
574  return agg_count(agg, val);
575  }
576  return *agg;
577 }

RUNTIME_EXPORT ALWAYS_INLINE void agg_id ( int64_t *  agg,
const int64_t  val 
)

Definition at line 414 of file RuntimeFunctions.cpp.

414  {
415  *agg = val;
416 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double ( int64_t *  agg,
const double  val 
)

Definition at line 656 of file RuntimeFunctions.cpp.

657  {
658  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
659 }
GPU_RT_STUB void agg_id_double_shared_slow ( int64_t *  agg,
const double *  val 
)

Definition at line 926 of file RuntimeFunctions.cpp.

926 {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float ( int32_t *  agg,
const float  val 
)

Definition at line 701 of file RuntimeFunctions.cpp.

701  {
702  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
703 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t* agg_id_varlen ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 418 of file RuntimeFunctions.cpp.

421  {
422  for (auto i = 0; i < size_bytes; i++) {
423  varlen_buffer[offset + i] = value[i];
424  }
425  return &varlen_buffer[offset];
426 }
GPU_RT_STUB int8_t* agg_id_varlen_shared ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 866 of file RuntimeFunctions.cpp.

869  {
870  return nullptr;
871 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max ( int64_t *  agg,
const int64_t  val 
)

Definition at line 406 of file RuntimeFunctions.cpp.

406  {
407  *agg = std::max(*agg, val);
408 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double ( int64_t *  agg,
const double  val 
)

Definition at line 644 of file RuntimeFunctions.cpp.

645  {
646  const auto r = std::max(*reinterpret_cast<const double*>(agg), val);
647  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
648 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float ( int32_t *  agg,
const float  val 
)

Definition at line 689 of file RuntimeFunctions.cpp.

690  {
691  const auto r = std::max(*reinterpret_cast<const float*>(agg), val);
692  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
693 }
GPU_RT_STUB void agg_max_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 910 of file RuntimeFunctions.cpp.

912  {}
GPU_RT_STUB void agg_max_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 914 of file RuntimeFunctions.cpp.

916  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_min ( int64_t *  agg,
const int64_t  val 
)

Definition at line 410 of file RuntimeFunctions.cpp.

410  {
411  *agg = std::min(*agg, val);
412 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double ( int64_t *  agg,
const double  val 
)

Definition at line 650 of file RuntimeFunctions.cpp.

651  {
652  const auto r = std::min(*reinterpret_cast<const double*>(agg), val);
653  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
654 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float ( int32_t *  agg,
const float  val 
)

Definition at line 695 of file RuntimeFunctions.cpp.

696  {
697  const auto r = std::min(*reinterpret_cast<const float*>(agg), val);
698  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
699 }
GPU_RT_STUB void agg_min_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 918 of file RuntimeFunctions.cpp.

920  {}
GPU_RT_STUB void agg_min_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 922 of file RuntimeFunctions.cpp.

924  {}
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum ( int64_t *  agg,
const int64_t  val 
)

Definition at line 400 of file RuntimeFunctions.cpp.

Referenced by agg_sum_skip_val().

400  {
401  const auto old = *agg;
402  *agg += val;
403  return old;
404 }


RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double ( int64_t *  agg,
const double  val 
)

Definition at line 638 of file RuntimeFunctions.cpp.

639  {
640  const auto r = *reinterpret_cast<const double*>(agg) + val;
641  *agg = *reinterpret_cast<const int64_t*>(may_alias_ptr(&r));
642 }
GPU_RT_STUB void agg_sum_double_shared ( int64_t *  agg,
const double  val 
)

Definition at line 947 of file RuntimeFunctions.cpp.

947 {}
GPU_RT_STUB void agg_sum_double_skip_val_shared ( int64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 949 of file RuntimeFunctions.cpp.

951  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float ( int32_t *  agg,
const float  val 
)

Definition at line 683 of file RuntimeFunctions.cpp.

684  {
685  const auto r = *reinterpret_cast<const float*>(agg) + val;
686  *agg = *reinterpret_cast<const int32_t*>(may_alias_ptr(&r));
687 }
GPU_RT_STUB void agg_sum_float_shared ( int32_t *  agg,
const float  val 
)

Definition at line 952 of file RuntimeFunctions.cpp.

952 {}
GPU_RT_STUB void agg_sum_float_skip_val_shared ( int32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 954 of file RuntimeFunctions.cpp.

956  {}
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 ( int32_t *  agg,
const int32_t  val 
)

Definition at line 469 of file RuntimeFunctions.cpp.

Referenced by agg_sum_int32_skip_val().

470  {
471  const auto old = *agg;
472  *agg += val;
473  return old;
474 }


GPU_RT_STUB int32_t agg_sum_int32_shared ( int32_t *  agg,
const int32_t  val 
)

Definition at line 937 of file RuntimeFunctions.cpp.

937  {
938  return 0;
939 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 559 of file RuntimeFunctions.cpp.

References agg_sum_int32().

559  {
560  const auto old = *agg;
561  if (val != skip_val) {
562  if (old != skip_val) {
563  return agg_sum_int32(agg, val);
564  } else {
565  *agg = val;
566  }
567  }
568  return old;
569 }

GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 941 of file RuntimeFunctions.cpp.

943  {
944  return 0;
945 }
GPU_RT_STUB int64_t agg_sum_shared ( int64_t *  agg,
const int64_t  val 
)

Definition at line 928 of file RuntimeFunctions.cpp.

928  {
929  return 0;
930 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 544 of file RuntimeFunctions.cpp.

References agg_sum().

Referenced by Executor::reduceResults().

546  {
547  const auto old = *agg;
548  if (val != skip_val) {
549  if (old != skip_val) {
550  return agg_sum(agg, val);
551  } else {
552  *agg = val;
553  }
554  }
555  return old;
556 }
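
Note that skip_val doubles as the slot's initial NULL state: the first non-skip value overwrites the sentinel, and subsequent values are summed through agg_sum. A standalone behavior sketch with a hypothetical sentinel:

#include <cassert>
#include <cstdint>

// Bodies copied from the definitions above, for a self-contained demo.
int64_t agg_sum_sketch(int64_t* agg, const int64_t val) {
  const auto old = *agg;
  *agg += val;
  return old;
}

int64_t agg_sum_skip_val_sketch(int64_t* agg,
                                const int64_t val,
                                const int64_t skip_val) {
  const auto old = *agg;
  if (val != skip_val) {
    if (old != skip_val) {
      return agg_sum_sketch(agg, val);
    } else {
      *agg = val;  // first non-NULL value replaces the sentinel
    }
  }
  return old;
}

int main() {
  const int64_t null_sentinel = INT64_MIN;  // hypothetical NULL sentinel
  int64_t acc = null_sentinel;              // slot initialized to NULL
  agg_sum_skip_val_sketch(&acc, 10, null_sentinel);
  agg_sum_skip_val_sketch(&acc, null_sentinel, null_sentinel);  // NULL row: ignored
  agg_sum_skip_val_sketch(&acc, 5, null_sentinel);
  assert(acc == 15);
}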

GPU_RT_STUB int64_t agg_sum_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 932 of file RuntimeFunctions.cpp.

934  {
935  return 0;
936 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set ( const int64_t  bitset,
const int64_t  val,
const int64_t  min_val,
const int64_t  max_val,
const int64_t  null_val,
const int8_t  null_bool_val 
)

Definition at line 378 of file RuntimeFunctions.cpp.

383  {
384  if (val == null_val) {
385  return null_bool_val;
386  }
387  if (val < min_val || val > max_val) {
388  return 0;
389  }
390  if (!bitset) {
391  return 0;
392  }
393  const uint64_t bitmap_idx = val - min_val;
394  return (reinterpret_cast<const int8_t*>(bitset))[bitmap_idx >> 3] &
395  (1 << (bitmap_idx & 7))
396  ? 1
397  : 0;
398 }
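
bit_is_set is the read-side counterpart of agg_count_distinct_bitmap: both address bit (val - min_val) of the same byte array. A self-contained round-trip sketch (the real aggregate writes through a bitmap pointer stored in the int64_t slot):

#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  const int64_t min_val = 100, max_val = 163;  // hypothetical value range
  std::vector<int8_t> bitmap((max_val - min_val + 8) / 8, 0);

  // Set side (agg_count_distinct_bitmap): bit index is val - min_val.
  const int64_t val = 130;
  const uint64_t bitmap_idx = val - min_val;
  bitmap[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));

  // Test side (bit_is_set): same addressing over the same buffer.
  auto bit_is_set_sketch = [&](int64_t v) -> int8_t {
    if (v < min_val || v > max_val) {
      return 0;
    }
    const uint64_t idx = v - min_val;
    return (bitmap[idx >> 3] & (1 << (idx & 7))) ? 1 : 0;
  };
  assert(bit_is_set_sketch(130) == 1);
  assert(bit_is_set_sketch(131) == 0);
}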
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length ( const char *  str,
const int32_t  str_len 
)

Definition at line 1314 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit().

1314  {
1315  return str_len;
1316 }


RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable ( const char *  str,
const int32_t  str_len,
const int32_t  int_null 
)

Definition at line 1319 of file RuntimeFunctions.cpp.

1319  {
1320  if (!str) {
1321  return int_null;
1322  }
1323  return str_len;
1324 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt ( )

Definition at line 1613 of file RuntimeFunctions.cpp.

References check_interrupt_init(), INT_CHECK, and runtime_interrupt_flag.

1613  {
1614  if (check_interrupt_init(static_cast<unsigned>(INT_CHECK))) {
1615  return true;
1616  }
1617  return false;
1618 }

RUNTIME_EXPORT bool check_interrupt_init ( unsigned  command)

Definition at line 1620 of file RuntimeFunctions.cpp.

References INT_ABORT, INT_CHECK, INT_RESET, and runtime_interrupt_flag.

Referenced by check_interrupt(), Executor::interrupt(), and Executor::resetInterrupt().

1620  {
1621  static std::atomic_bool runtime_interrupt_flag{false};
1622 
1623  if (command == static_cast<unsigned>(INT_CHECK)) {
1624  if (runtime_interrupt_flag.load()) {
1625  return true;
1626  }
1627  return false;
1628  }
1629  if (command == static_cast<unsigned>(INT_ABORT)) {
1630  runtime_interrupt_flag.store(true);
1631  return false;
1632  }
1633  if (command == static_cast<unsigned>(INT_RESET)) {
1634  runtime_interrupt_flag.store(false);
1635  return false;
1636  }
1637  return false;
1638 }
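
The three commands form a small protocol around one function-static atomic flag: INT_ABORT raises it, INT_CHECK polls it, and INT_RESET clears it. A standalone sketch of the same pattern (the real enum values are defined in RuntimeFunctions.h; the ones below are placeholders):

#include <atomic>
#include <cassert>

// Placeholder command values; the real ones come from RuntimeFunctions.h.
enum : unsigned { INT_CHECK = 0, INT_ABORT = 1, INT_RESET = 2 };

bool check_interrupt_init_sketch(const unsigned command) {
  static std::atomic_bool runtime_interrupt_flag{false};
  if (command == INT_CHECK) {
    return runtime_interrupt_flag.load();  // poll without modifying
  }
  if (command == INT_ABORT) {
    runtime_interrupt_flag.store(true);    // request interruption
  } else if (command == INT_RESET) {
    runtime_interrupt_flag.store(false);   // clear for the next query
  }
  return false;
}

int main() {
  assert(!check_interrupt_init_sketch(INT_CHECK));  // nothing pending
  check_interrupt_init_sketch(INT_ABORT);           // e.g. Executor::interrupt()
  assert(check_interrupt_init_sketch(INT_CHECK));   // kernel poll sees the flag
  check_interrupt_init_sketch(INT_RESET);           // e.g. Executor::resetInterrupt()
  assert(!check_interrupt_init_sketch(INT_CHECK));
}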

RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 429 of file RuntimeFunctions.cpp.

429  {
430  if (val == null_val) {
431  return 0;
432  }
433 
434  if (*agg == val) {
435  return 0;
436  } else if (*agg == null_val) {
437  *agg = val;
438  return 0;
439  } else {
440  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
441  return 15;
442  }
443 }
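
This implements SINGLE_VALUE semantics: the slot may hold NULL or exactly one distinct value, and a second distinct value reports error code 15 (Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES). A usage sketch with a hypothetical NULL sentinel:

#include <cassert>
#include <cstdint>

// Copy of the definition above, for a standalone demo.
int32_t checked_single_agg_id_sketch(int64_t* agg,
                                     const int64_t val,
                                     const int64_t null_val) {
  if (val == null_val) {
    return 0;  // NULL rows never conflict
  }
  if (*agg == val) {
    return 0;  // same value again: fine
  } else if (*agg == null_val) {
    *agg = val;  // first value claims the slot
    return 0;
  }
  return 15;  // second distinct value: error
}

int main() {
  const int64_t null_sentinel = INT64_MIN;  // hypothetical NULL sentinel
  int64_t slot = null_sentinel;
  assert(checked_single_agg_id_sketch(&slot, 42, null_sentinel) == 0);
  assert(checked_single_agg_id_sketch(&slot, 42, null_sentinel) == 0);
  assert(checked_single_agg_id_sketch(&slot, 7, null_sentinel) == 15);
}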
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 662 of file RuntimeFunctions.cpp.

662  {
663  if (val == null_val) {
664  return 0;
665  }
666 
667  if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)))) {
668  return 0;
669  } else if (*agg == *(reinterpret_cast<const int64_t*>(may_alias_ptr(&null_val)))) {
670  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
671  return 0;
672  } else {
673  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
674  return 15;
675  }
676 }
GPU_RT_STUB int32_t checked_single_agg_id_double_shared ( int64_t *  agg,
const double  val,
const double  null_val 
)

Definition at line 898 of file RuntimeFunctions.cpp.

900  {
901  return 0;
902 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 706 of file RuntimeFunctions.cpp.

706  {
707  if (val == null_val) {
708  return 0;
709  }
710 
711  if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)))) {
712  return 0;
713  } else if (*agg == *(reinterpret_cast<const int32_t*>(may_alias_ptr(&null_val)))) {
714  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
715  return 0;
716  } else {
717  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
718  return 15;
719  }
720 }
GPU_RT_STUB int32_t checked_single_agg_id_float_shared ( int32_t *  agg,
const float  val,
const float  null_val 
)

Definition at line 904 of file RuntimeFunctions.cpp.

906  {
907  return 0;
908 }
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  null_val 
)

Definition at line 886 of file RuntimeFunctions.cpp.

888  {
889  return 0;
890 }
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  null_val 
)

Definition at line 880 of file RuntimeFunctions.cpp.

882  {
883  return 0;
884 }
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  null_val 
)

Definition at line 891 of file RuntimeFunctions.cpp.

893  {
894  return 0;
895 }
GPU_RT_STUB int32_t checked_single_agg_id_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  null_val 
)

Definition at line 873 of file RuntimeFunctions.cpp.

875  {
876  return 0;
877 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil ( const int64_t  x,
const int64_t  scale 
)

Definition at line 789 of file RuntimeFunctions.cpp.

References decimal_floor().

790  {
791  return decimal_floor(x, scale) + (x % scale ? scale : 0);
792 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor ( const int64_t  x,
const int64_t  scale 
)

Definition at line 778 of file RuntimeFunctions.cpp.

Referenced by decimal_ceil().

779  {
780  if (x >= 0) {
781  return x / scale * scale;
782  }
783  if (!(x % scale)) {
784  return x;
785  }
786  return x / scale * scale - scale;
787 }

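
Both decimal_floor and decimal_ceil operate on scaled decimal integers, so with scale = 100 (two fractional digits) the value -250 represents -2.50. A runnable check against the definitions above:

#include <cassert>
#include <cstdint>

// Bodies copied from the definitions above, for a self-contained demo.
int64_t decimal_floor_sketch(const int64_t x, const int64_t scale) {
  if (x >= 0) {
    return x / scale * scale;
  }
  if (!(x % scale)) {
    return x;  // already a multiple: exact value unchanged
  }
  return x / scale * scale - scale;
}

int64_t decimal_ceil_sketch(const int64_t x, const int64_t scale) {
  return decimal_floor_sketch(x, scale) + (x % scale ? scale : 0);
}

int main() {
  // scale = 100: values carry two decimal digits, so -250 == -2.50.
  assert(decimal_floor_sketch(250, 100) == 200);    // floor(2.50)  ==  2.00
  assert(decimal_floor_sketch(-250, 100) == -300);  // floor(-2.50) == -3.00
  assert(decimal_ceil_sketch(-250, 100) == -200);   // ceil(-2.50)  == -2.00
  assert(decimal_ceil_sketch(300, 100) == 300);     // exact values unchanged
}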

GPU_RT_STUB int64_t* declare_dynamic_shared_memory ( )

Definition at line 994 of file RuntimeFunctions.cpp.

994  {
995  return nullptr;
996 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t extract_str_len ( const uint64_t  str_and_len)

Definition at line 1285 of file RuntimeFunctions.cpp.

Referenced by extract_str_len_noinline().

1285  {
1286  return static_cast<int64_t>(str_and_len) >> 48;
1287 }


RUNTIME_EXPORT NEVER_INLINE int32_t extract_str_len_noinline ( const uint64_t  str_and_len)

Definition at line 1295 of file RuntimeFunctions.cpp.

References extract_str_len().

Referenced by string_compress().

1295  {
1296  return extract_str_len(str_and_len);
1297 }

RUNTIME_EXPORT ALWAYS_INLINE int8_t* extract_str_ptr ( const uint64_t  str_and_len)

Definition at line 1279 of file RuntimeFunctions.cpp.

Referenced by extract_str_ptr_noinline().

1280  {
1281  return reinterpret_cast<int8_t*>(str_and_len & 0xffffffffffff);
1282 }


RUNTIME_EXPORT NEVER_INLINE int8_t* extract_str_ptr_noinline ( const uint64_t  str_and_len)

Definition at line 1289 of file RuntimeFunctions.cpp.

References extract_str_ptr().

Referenced by string_compress().

1290  {
1291  return extract_str_ptr(str_and_len);
1292 }
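
The extract_str_* functions assume the packing produced by string_pack (declared above): pointer in the low 48 bits, length in the high 16 (the arithmetic shift in extract_str_len sign-extends it). A round-trip sketch under that assumed layout, which also relies on 48-bit user-space pointers as on typical x86-64 systems:

#include <cassert>
#include <cstdint>

// Assumed layout: pointer in the low 48 bits, length in the high 16.
uint64_t string_pack_sketch(const int8_t* ptr, const int32_t len) {
  return (reinterpret_cast<uint64_t>(ptr) & 0xffffffffffffULL) |
         (static_cast<uint64_t>(static_cast<uint16_t>(len)) << 48);
}

int main() {
  int8_t buf[5] = {'h', 'e', 'l', 'l', 'o'};
  const uint64_t packed = string_pack_sketch(buf, 5);
  // Unpack side, mirroring extract_str_ptr / extract_str_len above.
  int8_t* ptr = reinterpret_cast<int8_t*>(packed & 0xffffffffffffULL);
  const int32_t len = static_cast<int64_t>(packed) >> 48;
  assert(ptr == buf && len == 5);
}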

RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs ( const int64_t  dividend,
const int64_t  divisor 
)

Definition at line 210 of file RuntimeFunctions.cpp.

Referenced by floor_div_nullable_lhs().

211  {
212  return (dividend < 0 ? dividend - (divisor - 1) : dividend) / divisor;
213 }


RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs ( const int64_t  dividend,
const int64_t  divisor,
const int64_t  null_val 
)

Definition at line 218 of file RuntimeFunctions.cpp.

References floor_div_lhs().

220  {
221  return dividend == null_val ? null_val : floor_div_lhs(dividend, divisor);
222 }
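
floor_div_lhs rounds the quotient toward negative infinity, unlike C++ integer division, which truncates toward zero; the (divisor - 1) bias fires only for negative dividends and assumes a positive divisor. A worked check:

#include <cassert>
#include <cstdint>

// Body copied from the definition above, for a self-contained demo.
int64_t floor_div_lhs_sketch(const int64_t dividend, const int64_t divisor) {
  return (dividend < 0 ? dividend - (divisor - 1) : dividend) / divisor;
}

int main() {
  assert(-7 / 2 == -3);                       // built-in: truncates toward zero
  assert(floor_div_lhs_sketch(-7, 2) == -4);  // floor(-3.5) == -4
  assert(floor_div_lhs_sketch(7, 2) == 3);    // non-negative dividends unchanged
}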

GPU_RT_STUB void force_sync ( )

Definition at line 958 of file RuntimeFunctions.cpp.

958 {}
GPU_RT_STUB int64_t get_block_index ( )

Definition at line 998 of file RuntimeFunctions.cpp.

998  {
999  return 0;
1000 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code ( int32_t *  error_codes)

Definition at line 1017 of file RuntimeFunctions.cpp.

References pos_start_impl().

1017  {
1018  return error_codes[pos_start_impl(nullptr)];
1019 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad 
)

Definition at line 1259 of file RuntimeFunctions.cpp.

1264  {
1265  return groups_buffer + row_size_quad * (key - min_key);
1266 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless_semiprivate ( int64_t *  groups_buffer,
const int64_t  key,
const int64_t  min_key,
const int64_t  ,
const uint32_t  row_size_quad,
const uint8_t  thread_warp_idx,
const uint8_t  warp_size 
)

Definition at line 1268 of file RuntimeFunctions.cpp.

1275  {
1276  return groups_buffer + row_size_quad * (warp_size * (key - min_key) + thread_warp_idx);
1277 }
template<typename T >
ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const T *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 1087 of file RuntimeFunctions.cpp.

References align_to_int64(), and heavydb.dtypes::T.

1091  {
1092  auto off = h * row_size_quad;
1093  auto row_ptr = reinterpret_cast<T*>(groups_buffer + off);
1094  if (*row_ptr == get_empty_key<T>()) {
1095  memcpy(row_ptr, key, key_count * sizeof(T));
1096  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1097  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1098  }
1099  if (memcmp(row_ptr, key, key_count * sizeof(T)) == 0) {
1100  auto row_ptr_i8 = reinterpret_cast<int8_t*>(row_ptr + key_count);
1101  return reinterpret_cast<int64_t*>(align_to_int64(row_ptr_i8));
1102  }
1103  return nullptr;
1104 }
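
The function resolves exactly one slot of the open-addressing group-by hash table: it claims an empty row, matches an existing key, or returns nullptr on a collision, leaving the probing policy to the caller. A simplified, int64_t-only sketch of that contract plus a hypothetical linear-probing driver (the real probing logic lives in GroupByRuntime.cpp, and the real EMPTY_KEY_64 value may differ):

#include <cstdint>
#include <cstring>

constexpr int64_t EMPTY_KEY_64 = INT64_MAX;  // hypothetical empty-slot sentinel

// Simplified, int64_t-only version of get_matching_group_value.
int64_t* get_matching_group_value_sketch(int64_t* groups_buffer,
                                         const uint32_t h,
                                         const int64_t* key,
                                         const uint32_t key_count,
                                         const uint32_t row_size_quad) {
  int64_t* row_ptr = groups_buffer + h * row_size_quad;
  if (row_ptr[0] == EMPTY_KEY_64) {  // empty slot: claim it for this key
    std::memcpy(row_ptr, key, key_count * sizeof(int64_t));
    return row_ptr + key_count;      // aggregate columns follow the key
  }
  if (std::memcmp(row_ptr, key, key_count * sizeof(int64_t)) == 0) {
    return row_ptr + key_count;      // same key: reuse the row
  }
  return nullptr;                    // collision: caller probes elsewhere
}

// Caller-side contract: probe successive slots until a row is resolved.
int64_t* probe(int64_t* groups_buffer, const uint32_t entry_count,
               const uint32_t h, const int64_t* key,
               const uint32_t key_count, const uint32_t row_size_quad) {
  for (uint32_t i = 0; i < entry_count; ++i) {
    if (auto* vals = get_matching_group_value_sketch(
            groups_buffer, (h + i) % entry_count, key, key_count, row_size_quad)) {
      return vals;
    }
  }
  return nullptr;  // table full
}

int main() {
  int64_t table[12];
  for (auto& slot : table) slot = EMPTY_KEY_64;  // 4 entries x 3 quadwords
  const int64_t key = 42;
  int64_t* vals = probe(table, 4, 1, &key, 1, 3);
  vals[0] = 7;  // first aggregate slot for group {42}
}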

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width,
const uint32_t  row_size_quad 
)

Definition at line 1106 of file RuntimeFunctions.cpp.

References get_matching_group_value().

1112  {
1113  switch (key_width) {
1114  case 4:
1115  return get_matching_group_value(groups_buffer,
1116  h,
1117  reinterpret_cast<const int32_t*>(key),
1118  key_count,
1119  row_size_quad);
1120  case 8:
1121  return get_matching_group_value(groups_buffer, h, key, key_count, row_size_quad);
1122  default:;
1123  }
1124  return nullptr;
1125 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_columnar ( int64_t *  groups_buffer,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_qw_count,
const size_t  entry_count 
)

Definition at line 1175 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

1180  {
1181  auto off = h;
1182  if (groups_buffer[off] == EMPTY_KEY_64) {
1183  for (size_t i = 0; i < key_qw_count; ++i) {
1184  groups_buffer[off] = key[i];
1185  off += entry_count;
1186  }
1187  return &groups_buffer[off];
1188  }
1189  off = h;
1190  for (size_t i = 0; i < key_qw_count; ++i) {
1191  if (groups_buffer[off] != key[i]) {
1192  return nullptr;
1193  }
1194  off += entry_count;
1195  }
1196  return &groups_buffer[off];
1197 }
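
In this columnar layout, key component i of entry h lives at groups_buffer[h + i * entry_count]: components are strided by entry_count instead of stored contiguously per row. A small layout demonstration mirroring the write path above:

#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  // 4 entries, 2-component keys: component 0 lives in slots [0, 4),
  // component 1 in slots [4, 8); entry h's key is {buf[h], buf[h + 4]}.
  const size_t entry_count = 4, key_qw_count = 2;
  std::vector<int64_t> groups_buffer(entry_count * key_qw_count, 0);

  const uint32_t h = 1;
  const int64_t key[] = {10, 20};
  size_t off = h;
  for (size_t i = 0; i < key_qw_count; ++i) {  // write path from the definition above
    groups_buffer[off] = key[i];
    off += entry_count;
  }
  assert(groups_buffer[1] == 10 && groups_buffer[5] == 20);
}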
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const T *  key,
const uint32_t  key_count 
)

Definition at line 1128 of file RuntimeFunctions.cpp.

References heavydb.dtypes::T.

1132  {
1133  auto off = h;
1134  auto key_buffer = reinterpret_cast<T*>(groups_buffer);
1135  if (key_buffer[off] == get_empty_key<T>()) {
1136  for (size_t i = 0; i < key_count; ++i) {
1137  key_buffer[off] = key[i];
1138  off += entry_count;
1139  }
1140  return h;
1141  }
1142  off = h;
1143  for (size_t i = 0; i < key_count; ++i) {
1144  if (key_buffer[off] != key[i]) {
1145  return -1;
1146  }
1147  off += entry_count;
1148  }
1149  return h;
1150 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot ( int64_t *  groups_buffer,
const uint32_t  entry_count,
const uint32_t  h,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  key_width 
)

Definition at line 1153 of file RuntimeFunctions.cpp.

References get_matching_group_value_columnar_slot().

1158  {
1159  switch (key_width) {
1160  case 4:
1161  return get_matching_group_value_columnar_slot(groups_buffer,
1162  entry_count,
1163  h,
1164  reinterpret_cast<const int32_t*>(key),
1165  key_count);
1166  case 8:
1167  return get_matching_group_value_columnar_slot(
1168  groups_buffer, entry_count, h, key, key_count);
1169  default:
1170  return -1;
1171  }
1172  return -1;
1173 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  row_size_quad 
)

Definition at line 1210 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

1215  {
1216  uint32_t off = hashed_index * row_size_quad;
1217  if (groups_buffer[off] == EMPTY_KEY_64) {
1218  for (uint32_t i = 0; i < key_count; ++i) {
1219  groups_buffer[off + i] = key[i];
1220  }
1221  }
1222  return groups_buffer + off + key_count;
1223 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash_keyless ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const uint32_t  row_size_quad 
)

For a particular hashed index (only used with multi-column perfect-hash group-by), returns the row-wise offset of the group in the output buffer. Since it is intended for keyless hash use, it assumes there are no group key columns prepended to the output buffer.

Definition at line 1232 of file RuntimeFunctions.cpp.

1234  {
1235  return groups_buffer + row_size_quad * hashed_index;
1236 }
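
Illustrative usage (sizes are arbitrary): with row_size_quad = 4, hashed_index 10 resolves to groups_buffer + 40, the first aggregate slot of row 10, since no key columns precede the row in keyless mode.

#include <cstdint>
#include <vector>

void keyless_example() {
  // 100 entries, 4 quadwords of aggregate state per row (illustrative sizes).
  std::vector<int64_t> buf(100 * 4, 0);
  int64_t* row = get_matching_group_value_perfect_hash_keyless(
      buf.data(), /*hashed_index=*/10, /*row_size_quad=*/4);
  // row == buf.data() + 40
  row[0] += 1;  // e.g. bump a COUNT slot
}
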
GPU_RT_STUB int64_t get_thread_index ( )

Definition at line 990 of file RuntimeFunctions.cpp.

990  {
991  return 0;
992 }
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ( )

Definition at line 978 of file RuntimeFunctions.cpp.

References pos_start_impl().

978  {
979  return pos_start_impl(nullptr);
980 }

RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const bool  blocks_share_memory,
const int32_t  frag_idx 
)

Definition at line 1057 of file RuntimeFunctions.cpp.

1065  {
1066 #ifndef _WIN32
1067  // the body is not really needed, just make sure the call is not optimized away
1068  assert(groups_buffer);
1069 #endif
1070 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1043 of file RuntimeFunctions.cpp.

1050  {
1051 #ifndef _WIN32
1052  // the body is not really needed, just make sure the call is not optimized away
1053  assert(groups_buffer);
1054 #endif
1055 }
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl ( int64_t *  groups_buffer,
const int64_t *  init_vals,
const uint32_t  groups_buffer_entry_count,
const uint32_t  key_qw_count,
const uint32_t  agg_col_count,
const bool  keyless,
const int8_t  warp_size 
)

Definition at line 1072 of file RuntimeFunctions.cpp.

1079  {
1080 #ifndef _WIN32
1081  // the body is not really needed, just make sure the call is not optimized away
1082  assert(groups_buffer);
1083 #endif
1084 }
RUNTIME_EXPORT int64_t* init_shared_mem ( const int64_t *  global_groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1038 of file RuntimeFunctions.cpp.

1039  {
1040  return nullptr;
1041 }
RUNTIME_EXPORT NEVER_INLINE const int64_t* init_shared_mem_nop ( const int64_t *  groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1023 of file RuntimeFunctions.cpp.

1025  {
1026  return groups_buffer;
1027 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded ( const int32_t  str_id)

Definition at line 1327 of file RuntimeFunctions.cpp.

1327  {
1328  return str_id;
1329 }
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count ( uint8_t *  bitmap,
const uint32_t  bitmap_bytes,
const uint8_t *  key_bytes,
const uint32_t  key_len 
)

Definition at line 1514 of file RuntimeFunctions.cpp.

References MurmurHash3().

1518  {
1519  const uint32_t bit_pos = MurmurHash3(key_bytes, key_len, 0) % (bitmap_bytes * 8);
1520  const uint32_t word_idx = bit_pos / 32;
1521  const uint32_t bit_idx = bit_pos % 32;
1522  reinterpret_cast<uint32_t*>(bitmap)[word_idx] |= 1 << bit_idx;
1523 }
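
The bitmap filled here is the input to linear (probabilistic) counting. The textbook estimator is -m * ln(z / m), where m is the total number of bits and z the number of bits still zero; shown below as a hypothetical reader of the bitmap (linear_counting_estimate is not part of this file):

#include <cmath>
#include <cstdint>

// Standard linear-counting estimate over the bitmap populated by
// linear_probabilistic_count(); undefined once the bitmap saturates (z == 0).
double linear_counting_estimate(const uint8_t* bitmap, const uint32_t bitmap_bytes) {
  const double m = bitmap_bytes * 8.0;
  double z = 0;  // count of zero bits
  for (uint32_t i = 0; i < bitmap_bytes; ++i) {
    for (int b = 0; b < 8; ++b) {
      if (!(bitmap[i] & (1u << b))) {
        ++z;
      }
    }
  }
  return -m * std::log(z / m);
}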

RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal ( const int64_t *  sum,
const int64_t *  count,
const double  null_val,
const uint32_t  scale 
)

Definition at line 1493 of file RuntimeFunctions.cpp.

1496  {
1497  return *count != 0 ? (static_cast<double>(*sum) / pow(10, scale)) / *count : null_val;
1498 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double ( const int64_t *  agg,
const int64_t *  count,
const double  null_val 
)

Definition at line 1500 of file RuntimeFunctions.cpp.

1502  {
1503  return *count != 0 ? *reinterpret_cast<const double*>(may_alias_ptr(agg)) / *count
1504  : null_val;
1505 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float ( const int32_t *  agg,
const int32_t *  count,
const double  null_val 
)

Definition at line 1507 of file RuntimeFunctions.cpp.

1509  {
1510  return *count != 0 ? *reinterpret_cast<const float*>(may_alias_ptr(agg)) / *count
1511  : null_val;
1512 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int ( const int64_t *  sum,
const int64_t *  count,
const double  null_val 
)

Definition at line 1487 of file RuntimeFunctions.cpp.

1489  {
1490  return *count != 0 ? static_cast<double>(*sum) / *count : null_val;
1491 }
RUNTIME_EXPORT ALWAYS_INLINE double load_double ( const int64_t *  agg)

Definition at line 1479 of file RuntimeFunctions.cpp.

1479  {
1480  return *reinterpret_cast<const double*>(may_alias_ptr(agg));
1481 }
RUNTIME_EXPORT ALWAYS_INLINE float load_float ( const int32_t *  agg)

Definition at line 1483 of file RuntimeFunctions.cpp.

1483  {
1484  return *reinterpret_cast<const float*>(may_alias_ptr(agg));
1485 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 313 of file RuntimeFunctions.cpp.

315  {
316  if (lhs == null_val) {
317  return rhs == 0 ? rhs : null_val;
318  }
319  if (rhs == null_val) {
320  return lhs == 0 ? lhs : null_val;
321  }
322  return (lhs && rhs) ? 1 : 0;
323 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not ( const int8_t  operand,
const int8_t  null_val 
)

Definition at line 308 of file RuntimeFunctions.cpp.

309  {
310  return operand == null_val ? operand : (operand ? 0 : 1);
311 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 325 of file RuntimeFunctions.cpp.

327  {
328  if (lhs == null_val) {
329  return rhs == 0 ? null_val : rhs;
330  }
331  if (rhs == null_val) {
332  return lhs == 0 ? null_val : lhs;
333  }
334  return (lhs || rhs) ? 1 : 0;
335 }
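
Taken together, logical_not, logical_and and logical_or implement SQL's three-valued (Kleene) logic, with null_val as the NULL sentinel: NULL AND FALSE is FALSE, NULL OR TRUE is TRUE, and every other combination involving NULL stays NULL. A small self-check, with an arbitrary sentinel chosen for illustration:

#include <cassert>
#include <cstdint>

void three_valued_logic_example() {
  const int8_t kNull = -128;  // arbitrary NULL sentinel for this sketch
  assert(logical_and(kNull, 0, kNull) == 0);      // NULL AND FALSE -> FALSE
  assert(logical_and(kNull, 1, kNull) == kNull);  // NULL AND TRUE  -> NULL
  assert(logical_or(kNull, 1, kNull) == 1);       // NULL OR TRUE   -> TRUE
  assert(logical_or(kNull, 0, kNull) == kNull);   // NULL OR FALSE  -> NULL
  assert(logical_not(kNull, kNull) == kNull);     // NOT NULL       -> NULL
}
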
ALWAYS_INLINE DEVICE int32_t map_string_dict_id ( const int32_t  string_id,
const int64_t  translation_map_handle,
const int32_t  min_source_id 
)

Definition at line 1332 of file RuntimeFunctions.cpp.

1334  {
1335  const int32_t* translation_map =
1336  reinterpret_cast<const int32_t*>(translation_map_handle);
1337  return translation_map[string_id - min_source_id];
1338 }
RUNTIME_EXPORT void multifrag_query ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables 
)

Definition at line 1588 of file RuntimeFunctions.cpp.

References query_stub().

1598  {
1599  for (uint32_t i = 0; i < *num_fragments; ++i) {
1600  query_stub(col_buffers ? col_buffers[i] : nullptr,
1601  &num_rows[i * (*num_tables_ptr)],
1602  &frag_row_offsets[i * (*num_tables_ptr)],
1603  max_matched,
1604  init_agg_value,
1605  out,
1606  i,
1607  join_hash_tables,
1608  total_matched,
1609  error_code);
1610  }
1611 }
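
The indexing assumes num_rows and frag_row_offsets hold one entry per (fragment, table) pair, fragment-major, so fragment i's slice starts at index i * num_tables. A hypothetical layout for illustration:

// Assuming 2 fragments and *num_tables_ptr == 3 (one outer table plus two
// join tables), num_rows is laid out fragment-major:
//
//   num_rows = { f0_t0, f0_t1, f0_t2,    // fragment 0
//                f1_t0, f1_t1, f1_t2 };  // fragment 1
//
// so &num_rows[i * 3] is the per-table row-count slice handed to query_stub
// for fragment i; frag_row_offsets follows the same layout.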

RUNTIME_EXPORT void multifrag_query_hoisted_literals ( const int8_t ***  col_buffers,
const uint64_t *  num_fragments,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
int32_t *  total_matched,
const int64_t *  init_agg_value,
int64_t **  out,
int32_t *  error_code,
const uint32_t *  num_tables_ptr,
const int64_t *  join_hash_tables 
)

Definition at line 1544 of file RuntimeFunctions.cpp.

References query_stub_hoisted_literals().

1556  {
1557  for (uint32_t i = 0; i < *num_fragments; ++i) {
1558  query_stub_hoisted_literals(col_buffers ? col_buffers[i] : nullptr,
1559  literals,
1560  &num_rows[i * (*num_tables_ptr)],
1561  &frag_row_offsets[i * (*num_tables_ptr)],
1562  max_matched,
1563  init_agg_value,
1564  out,
1565  i,
1566  join_hash_tables,
1567  total_matched,
1568  error_code);
1569  }
1570 }

RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 1473 of file RuntimeFunctions.cpp.

1475  {
1476  return reinterpret_cast<const double*>(output_buff)[pos];
1477 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl ( int32_t *  error_code)

Definition at line 969 of file RuntimeFunctions.cpp.

969  {
970  int32_t row_index_resume{0};
971  if (error_code) {
972  row_index_resume = error_code[0];
973  error_code[0] = 0;
974  }
975  return row_index_resume;
976 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ( )

Definition at line 982 of file RuntimeFunctions.cpp.

982  {
983  return 1;
984 }
RUNTIME_EXPORT NEVER_INLINE void query_stub ( const int8_t **  col_buffers,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched 
)

Definition at line 1572 of file RuntimeFunctions.cpp.

Referenced by multifrag_query().

1581  {
1582 #ifndef _WIN32
1583  assert(col_buffers || num_rows || frag_row_offsets || max_matched || init_agg_value ||
1584  out || frag_idx || error_code || join_hash_tables || total_matched);
1585 #endif
1586 }

RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals ( const int8_t **  col_buffers,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
int64_t **  out,
uint32_t  frag_idx,
const int64_t *  join_hash_tables,
int32_t *  error_code,
int32_t *  total_matched 
)

Definition at line 1525 of file RuntimeFunctions.cpp.

Referenced by multifrag_query_hoisted_literals().

1536  {
1537 #ifndef _WIN32
1538  assert(col_buffers || literals || num_rows || frag_row_offsets || max_matched ||
1539  init_agg_value || out || frag_idx || error_code || join_hash_tables ||
1540  total_matched);
1541 #endif
1542 }

RUNTIME_EXPORT ALWAYS_INLINE void record_error_code ( const int32_t  err_code,
int32_t *  error_codes 
)

Definition at line 1004 of file RuntimeFunctions.cpp.

References pos_start_impl().

1005  {
1006  // NB: never override persistent error codes (with code greater than zero).
1007  // On GPU, a projection query with a limit can run out of slots without it
1008  // being an actual error if the limit has been hit. If a persistent error
1009  // (division by zero, for example) occurs before running out of slots, we
1010  // have to avoid overriding it, because there's a risk that the query would
1011  // go through if we override with a potentially benign out-of-slots code.
1012  if (err_code && error_codes[pos_start_impl(nullptr)] <= 0) {
1013  error_codes[pos_start_impl(nullptr)] = err_code;
1014  }
1015 }

RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 1469 of file RuntimeFunctions.cpp.

1469  {
1470  return reinterpret_cast<const int64_t*>(output_buff)[pos];
1471 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio ( const double  proportion,
const int64_t  row_offset 
)

Definition at line 1340 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit().

1342  {
1343  const int64_t threshold = 4294967296 * proportion;
1344  return (row_offset * 2654435761) % 4294967296 < threshold;
1345 }
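
2654435761 is Knuth's multiplicative-hashing constant (close to 2^32 / phi), so row offsets are scattered roughly uniformly over [0, 2^32) and kept when they fall below proportion * 2^32. A rough sanity check, illustrative only:

#include <cassert>
#include <cstdint>

void sample_ratio_example() {
  int64_t kept = 0;
  for (int64_t row = 0; row < 1000; ++row) {
    if (sample_ratio(/*proportion=*/0.5, row)) {
      ++kept;
    }
  }
  assert(kept > 400 && kept < 600);  // roughly half the rows are selected
}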

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 200 of file RuntimeFunctions.cpp.

202  {
203  int64_t tmp = scale >> 1;
204  tmp = operand >= 0 ? operand + tmp : operand - tmp;
205  return tmp / scale;
206 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 186 of file RuntimeFunctions.cpp.

188  {
189  // rounded scale down of a decimal
190  if (operand == null_val) {
191  return null_val;
192  }
193 
194  int64_t tmp = scale >> 1;
195  tmp = operand >= 0 ? operand + tmp : operand - tmp;
196  return tmp / scale;
197 }
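
Both scale-down variants round to the nearest unit, ties away from zero, by biasing the operand with scale / 2 before the integer division. Worked examples for scale = 100 (a DECIMAL column with two fractional digits), illustrative only:

#include <cassert>
#include <cstdint>

void scale_down_example() {
  // 12.35 stored as 1235: (1235 + 50) / 100 = 12
  assert(scale_decimal_down_nullable(1235, 100, INT64_MIN) == 12);
  // 12.50 stored as 1250: (1250 + 50) / 100 = 13 (tie rounds away from zero)
  assert(scale_decimal_down_nullable(1250, 100, INT64_MIN) == 13);
  // Negative values mirror: (-1250 - 50) / 100 = -13
  assert(scale_decimal_down_nullable(-1250, 100, INT64_MIN) == -13);
}
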
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up ( const int64_t  operand,
const uint64_t  scale,
const int64_t  operand_null_val,
const int64_t  result_null_val 
)

Definition at line 178 of file RuntimeFunctions.cpp.

181  {
182  return operand != operand_null_val ? operand * scale : result_null_val;
183 }
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  entry_count 
)

Definition at line 1243 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

1247  {
1248  if (groups_buffer[hashed_index] == EMPTY_KEY_64) {
1249  for (uint32_t i = 0; i < key_count; i++) {
1250  groups_buffer[i * entry_count + hashed_index] = key[i];
1251  }
1252  }
1253 }
#define EMPTY_KEY_64
RUNTIME_EXPORT ALWAYS_INLINE uint64_t string_pack ( const int8_t *  ptr,
const int32_t  len 
)

Definition at line 1299 of file RuntimeFunctions.cpp.

1300  {
1301  return (reinterpret_cast<const uint64_t>(ptr) & 0xffffffffffff) |
1302  (static_cast<const uint64_t>(len) << 48);
1303 }
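
The pointer is assumed to fit in 48 bits (true of current x86-64 user-space addresses), leaving the top 16 bits of the packed word for the string length. An illustrative round trip:

#include <cassert>
#include <cstdint>

void string_pack_example() {
  const auto ptr = reinterpret_cast<const int8_t*>(0x00007f12abcd1234ULL);
  const uint64_t packed = string_pack(ptr, 5);
  assert((packed & 0xffffffffffffULL) == 0x7f12abcd1234ULL);  // pointer bits
  assert((packed >> 48) == 5);                                // length bits
}
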
GPU_RT_STUB void sync_threadblock ( )

Definition at line 962 of file RuntimeFunctions.cpp.

962 {}
GPU_RT_STUB void sync_warp ( )

Definition at line 960 of file RuntimeFunctions.cpp.

960 {}
GPU_RT_STUB void sync_warp_protected ( int64_t  thread_pos,
int64_t  row_count 
)

Definition at line 961 of file RuntimeFunctions.cpp.

961 {}
GPU_RT_STUB int8_t thread_warp_idx ( const int8_t  warp_sz)

Definition at line 986 of file RuntimeFunctions.cpp.

986  {
987  return 0;
988 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 1348 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit(), width_bucket_expr(), and width_bucket_nullable().

1352  {
1353  if (target_value < lower_bound) {
1354  return 0;
1355  } else if (target_value >= upper_bound) {
1356  return partition_count + 1;
1357  }
1358  return ((target_value - lower_bound) * scale_factor) + 1;
1359 }
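
This mirrors SQL's WIDTH_BUCKET: values below the range land in bucket 0, values at or above upper_bound land in partition_count + 1, and callers precompute scale_factor as partition_count / (upper_bound - lower_bound) (see width_bucket_expr()). Illustrative checks:

#include <cassert>

void width_bucket_example() {
  // WIDTH_BUCKET(x, 0.0, 10.0, 5): five buckets of width 2, scale_factor = 0.5.
  assert(width_bucket(-1.0, 0.0, 10.0, 0.5, 5) == 0);  // below the range
  assert(width_bucket(3.0, 0.0, 10.0, 0.5, 5) == 2);   // (3 - 0) * 0.5 + 1 = 2.5 -> 2
  assert(width_bucket(10.0, 0.0, 10.0, 0.5, 5) == 6);  // at/above the upper bound
}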

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 1421 of file RuntimeFunctions.cpp.

References width_bucket(), and width_bucket_reversed().

Referenced by CodeGenerator::codegen(), getExpressionRange(), ScalarExprVisitor< std::unordered_set< InputColDescriptor > >::visit(), and width_bucket_expr_nullable().

1425  {
1426  if (reversed) {
1427  return width_bucket_reversed(target_value,
1428  lower_bound,
1429  upper_bound,
1430  partition_count / (lower_bound - upper_bound),
1431  partition_count);
1432  }
1433  return width_bucket(target_value,
1434  lower_bound,
1435  upper_bound,
1436  partition_count / (upper_bound - lower_bound),
1437  partition_count);
1438 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 1455 of file RuntimeFunctions.cpp.

References width_bucket_no_oob_check(), and width_bucket_reversed_no_oob_check().

1459  {
1460  if (reversed) {
1461  return width_bucket_reversed_no_oob_check(
1462  target_value, lower_bound, partition_count / (lower_bound - upper_bound));
1463  }
1464  return width_bucket_no_oob_check(
1465  target_value, lower_bound, partition_count / (upper_bound - lower_bound));
1466 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 1441 of file RuntimeFunctions.cpp.

References width_bucket_expr().

1446  {
1447  if (target_value == null_val) {
1448  return INT32_MIN;
1449  }
1450  return width_bucket_expr(
1451  target_value, reversed, lower_bound, upper_bound, partition_count);
1452 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 1407 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

1409  {
1410  return ((target_value - lower_bound) * scale_factor) + 1;
1411 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 1376 of file RuntimeFunctions.cpp.

References width_bucket().

1381  {
1382  if (target_value == null_val) {
1383  return INT32_MIN;
1384  }
1385  return width_bucket(
1386  target_value, lower_bound, upper_bound, scale_factor, partition_count);
1387 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 1362 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr(), and width_bucket_reversed_nullable().

1366  {
1367  if (target_value > lower_bound) {
1368  return 0;
1369  } else if (target_value <= upper_bound) {
1370  return partition_count + 1;
1371  }
1372  return ((lower_bound - target_value) * scale_factor) + 1;
1373 }

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 1414 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

1416  {
1417  return ((lower_bound - target_value) * scale_factor) + 1;
1418 }

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 1390 of file RuntimeFunctions.cpp.

References width_bucket_reversed().

1395  {
1396  if (target_value == null_val) {
1397  return INT32_MIN;
1398  }
1399  return width_bucket_reversed(
1400  target_value, lower_bound, upper_bound, scale_factor, partition_count);
1401 }

GPU_RT_STUB void write_back_non_grouped_agg ( int64_t *  input_buffer,
int64_t *  output_buffer,
const int32_t  num_agg_cols 
)

Definition at line 964 of file RuntimeFunctions.cpp.

966  {};
RUNTIME_EXPORT NEVER_INLINE void write_back_nop ( int64_t *  dest,
int64_t *  src,
const int32_t  sz 
)

Definition at line 1029 of file RuntimeFunctions.cpp.

1031  {
1032 #ifndef _WIN32
1033  // the body is not really needed, just make sure the call is not optimized away
1034  assert(dest);
1035 #endif
1036 }