Mirror of https://github.com/RfidResearchGroup/proxmark3.git, synced 2025-08-20 05:13:46 -07:00

commit 0d9223a547 (parent 112411042f)
client: fix mix of spaces & tabs

197 changed files with 49383 additions and 49383 deletions
@@ -146,16 +146,16 @@ count_bitarray_AND4_t count_bitarray_AND4_AVX512, count_bitarray_AND4_AVX2, coun

inline uint32_t *MALLOC_BITARRAY(uint32_t x)
{
#if defined (_WIN32)
    return __builtin_assume_aligned(_aligned_malloc((x), __BIGGEST_ALIGNMENT__), __BIGGEST_ALIGNMENT__);
#elif defined (__APPLE__)
    uint32_t *allocated_memory;
    if (posix_memalign((void**)&allocated_memory, __BIGGEST_ALIGNMENT__, x)) {
        return NULL;
    } else {
        return __builtin_assume_aligned(allocated_memory, __BIGGEST_ALIGNMENT__);
    }
#else
    return __builtin_assume_aligned(memalign(__BIGGEST_ALIGNMENT__, (x)), __BIGGEST_ALIGNMENT__);
#endif
}
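Note on the allocator pairing above: memory from Windows' _aligned_malloc must be released with _aligned_free, which is why FREE_BITARRAY in the next hunk branches on _WIN32. A minimal usage sketch, assuming the two functions from this diff are in scope (sizes and error handling are illustrative):

#include <stdint.h>
#include <string.h>

// Illustrative only: allocate one 2^24-bit array, zero it, release it.
int bitarray_example(void) {
    uint32_t *bitarray = MALLOC_BITARRAY((1 << 19) * sizeof(uint32_t));
    if (bitarray == NULL) return -1;                  // allocation may fail
    memset(bitarray, 0, (1 << 19) * sizeof(uint32_t));
    FREE_BITARRAY(bitarray);                          // matches the platform allocator
    return 0;
}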
@@ -163,139 +163,139 @@ inline uint32_t *MALLOC_BITARRAY(uint32_t x)

inline void FREE_BITARRAY(uint32_t *x)
{
#ifdef _WIN32
    _aligned_free(x);
#else
    free(x);
#endif
}

inline uint32_t BITCOUNT(uint32_t a)
{
    return __builtin_popcountl(a);
}


inline uint32_t COUNT_STATES(uint32_t *A)
{
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i]);
    }
    return count;
}

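The fixed loop bound (1<<19) works out as follows: 2^19 words of 32 bits hold exactly 2^24 bits, presumably one bit per possible 24-bit partial state in the hardnested attack. A tiny self-contained check of that arithmetic (illustrative, not from the codebase):

#include <assert.h>
#include <stdint.h>

int main(void) {
    // 2^19 words * 32 bits/word == 2^24 bits
    assert((1u << 19) * 32u == (1u << 24));
    // the same buffer viewed as uint16 chunks: 2^20 of them
    // (see the low20 variants later in this diff)
    assert(((1u << 19) * sizeof(uint32_t)) / sizeof(uint16_t) == (1u << 20));
    return 0;
}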
inline void BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
    }
}

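The restrict qualifiers plus __builtin_assume_aligned are what let GCC/Clang auto-vectorize these loops without emitting runtime alias or alignment checks. A minimal sketch of the same idiom, with illustrative names and an assumed 32-byte alignment:

#include <stdint.h>

// Illustrative: restrict promises dst and src don't alias, and
// __builtin_assume_aligned promises 32-byte alignment, so the compiler
// is free to use aligned SIMD loads/stores for the whole loop.
void and_words(uint32_t *restrict dst, const uint32_t *restrict src, uint32_t n) {
    dst = __builtin_assume_aligned(dst, 32);
    src = __builtin_assume_aligned(src, 32);
    for (uint32_t i = 0; i < n; i++) {
        dst[i] &= src[i];
    }
}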
inline void BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
    }
}

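What the low20 variant does, in prose: the two bitarrays are reinterpreted as 2^20 16-bit chunks, and a chunk of A is wiped whenever the matching chunk of B holds no set bits at all. A small self-contained illustration of that pruning rule (array size and values are made up):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void) {
    uint16_t a[8], b[8];
    memset(a, 0xFF, sizeof(a));   // A: all candidate bits still set
    memset(b, 0x00, sizeof(b));   // B: no candidates anywhere...
    b[3] = 0x0010;                // ...except one bit in chunk 3

    // same rule as BITARRAY_LOW20_AND, over 8 chunks instead of 1<<20
    for (uint32_t i = 0; i < 8; i++) {
        if (!b[i]) {
            a[i] = 0;
        }
    }
    printf("a[3]=%04x a[4]=%04x\n", a[3], a[4]);  // prints a[3]=ffff a[4]=0000
    return 0;
}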
inline uint32_t COUNT_BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
        count += BITCOUNT(A[i]);
    }
    return count;
}

inline uint32_t COUNT_BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
        count += BITCOUNT(a[i]);
    }
    return count;
}

inline void BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] = B[i] & C[i] & D[i];
    }
}

inline void BITARRAY_OR(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] |= B[i];
    }
}

inline uint32_t COUNT_BITARRAY_AND2(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i]);
    }
    return count;
}

inline uint32_t COUNT_BITARRAY_AND3(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i]);
    }
    return count;
}

inline uint32_t COUNT_BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i] & D[i]);
    }
    return count;
}

@@ -319,20 +319,20 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis

// determine the available instruction set at runtime and call the correct function
uint32_t *malloc_bitarray_dispatch(uint32_t x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
    else
#endif
#endif
        malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;

    // call the most optimized function for this CPU
    return (*malloc_bitarray_function_p)(x);
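Each of the dispatchers in the hunks below follows this exact template: a global function pointer starts out aimed at the dispatcher, the first call probes the CPU with __builtin_cpu_supports and rebinds the pointer to the best SIMD build, and every later call goes straight to the selected implementation. A minimal self-contained sketch of that pattern (GCC/Clang; names are illustrative, and only popcnt detection is shown):

#include <stdint.h>
#include <stdio.h>

static uint32_t bitcount_generic(uint32_t a) { return __builtin_popcountl(a); }
static uint32_t bitcount_dispatcher(uint32_t a);

// all call sites use this pointer; it initially targets the dispatcher
static uint32_t (*bitcount_fp)(uint32_t) = &bitcount_dispatcher;

static uint32_t bitcount_dispatcher(uint32_t a) {
#if defined (__i386__) || defined (__x86_64__)
    if (__builtin_cpu_supports("popcnt"))
        bitcount_fp = &bitcount_generic;  // a real port would pick a SIMD build here
    else
#endif
        bitcount_fp = &bitcount_generic;  // portable fallback
    // fall through to the freshly selected implementation; subsequent
    // calls through bitcount_fp skip the detection entirely
    return (*bitcount_fp)(a);
}

int main(void) {
    printf("%u\n", (*bitcount_fp)(0xFFu));  // prints 8
    return 0;
}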
@@ -340,20 +340,20 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {

void free_bitarray_dispatch(uint32_t *x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
    else
#endif
#endif
        free_bitarray_function_p = &free_bitarray_NOSIMD;

    // call the most optimized function for this CPU
    (*free_bitarray_function_p)(x);
@@ -361,20 +361,20 @@ void free_bitarray_dispatch(uint32_t *x) {

uint32_t bitcount_dispatch(uint32_t a) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
    else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
    else
#endif
#endif
        bitcount_function_p = &bitcount_NOSIMD;

    // call the most optimized function for this CPU
    return (*bitcount_function_p)(a);
@@ -382,20 +382,20 @@ uint32_t bitcount_dispatch(uint32_t a) {

uint32_t count_states_dispatch(uint32_t *bitarray) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
    else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
    else
#endif
#endif
        count_states_function_p = &count_states_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_states_function_p)(bitarray);
@@ -403,20 +403,20 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {

void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
    else
#endif
#endif
        bitarray_AND_function_p = &bitarray_AND_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_AND_function_p)(A,B);
@@ -424,20 +424,20 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {

void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
    else
#endif
#endif
        bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_low20_AND_function_p)(A, B);
@@ -445,20 +445,20 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {

uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
    else
#endif
#endif
        count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND_function_p)(A, B);
@@ -466,20 +466,20 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {

uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
    else
#endif
#endif
        count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_low20_AND_function_p)(A, B);
@@ -487,20 +487,20 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {

void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
    else
#endif
#endif
        bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_AND4_function_p)(A, B, C, D);
@@ -508,20 +508,20 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)

void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
    else
#endif
#endif
        bitarray_OR_function_p = &bitarray_OR_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_OR_function_p)(A,B);
@@ -529,20 +529,20 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {

uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
    else
#endif
#endif
        count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND2_function_p)(A, B);
@@ -550,20 +550,20 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {

uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
    else
#endif
#endif
        count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND3_function_p)(A, B, C);
@@ -571,20 +571,20 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {

uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
    else
#endif
#endif
        count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND4_function_p)(A, B, C, D);