fix compile issues on OS X 10.11

OS X 10.11 does not have clock_gettime().
clang <= 8.0.0 has a bug that makes __builtin_cpu_supports() non-functional;
see https://llvm.org/bugs/show_bug.cgi?id=25510
marshmellow42 2017-06-08 17:07:14 -04:00
parent b63bd049fc
commit 087c8bf330
4 changed files with 195 additions and 92 deletions
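
The clang half of the fix uses the same pattern in every touched dispatcher: the __builtin_cpu_supports() calls are wrapped in a preprocessor guard so that Apple clang 8 and older never compiles them and simply falls through to the non-SIMD path. A condensed sketch of that guard, assuming GCC or a working clang; the function name and the shortened if/else chain are illustrative, not the exact code from the tree:

#include <string.h>

/* Illustrative sketch of the guard added throughout this commit:
 * __builtin_cpu_supports() is only compiled in when the compiler is not
 * Apple clang <= 8 (see the llvm bug linked above). */
static void get_simd_name(char *instruction_set) {
#if defined(__i386__) || defined(__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
	if      (__builtin_cpu_supports("sse2")) strcpy(instruction_set, "SSE2");
	else if (__builtin_cpu_supports("mmx"))  strcpy(instruction_set, "MMX");
	else
#endif
#endif
		strcpy(instruction_set, "unsupported");
}

On OS X 10.11 with Xcode 8 the whole if/else chain drops out at preprocessing time, so the caller just gets "unsupported" and the dispatchers below select the NOSIMD implementations.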

@@ -70,6 +70,7 @@ static float brute_force_per_second;
static void get_SIMD_instruction_set(char* instruction_set) {
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) strcpy(instruction_set, "AVX512F");
else if (__builtin_cpu_supports("avx2")) strcpy(instruction_set, "AVX2");
@@ -79,7 +80,9 @@ static void get_SIMD_instruction_set(char* instruction_set) {
else if (__builtin_cpu_supports("avx")) strcpy(instruction_set, "AVX");
else if (__builtin_cpu_supports("sse2")) strcpy(instruction_set, "SSE2");
else if (__builtin_cpu_supports("mmx")) strcpy(instruction_set, "MMX");
else strcpy(instruction_set, "unsupported");
else
#endif
strcpy(instruction_set, "unsupported");
}

@@ -551,6 +551,7 @@ bitslice_test_nonces_t *bitslice_test_nonces_function_p = &bitslice_test_nonces_
// determine the available instruction set at runtime and call the correct function
const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
@@ -561,6 +562,7 @@ const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_firs
else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
else
#endif
#endif
crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
@@ -570,6 +572,7 @@ const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_firs
void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
@@ -580,6 +583,7 @@ void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_t
else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
else
#endif
#endif
bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
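
For context on the dispatchers above and below: every SIMD-sensitive routine is reached through a function pointer that initially points at its *_dispatch() function; on the first call the dispatcher picks the best implementation the CPU supports, repoints the pointer, and forwards the call, so later calls skip the detection. A minimal, self-contained sketch of that pattern with hypothetical names (work_NOSIMD, work_SSE2 and work_function_p are placeholders, not symbols from this commit), using the same Apple clang guard and assuming GCC or a working clang:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical implementations of one routine at different SIMD levels. */
static uint32_t work_NOSIMD(uint32_t a) { return a + 1; }
#if defined(__i386__) || defined(__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
static uint32_t work_SSE2(uint32_t a)   { return a + 1; } /* would use SSE2 intrinsics */
#endif
#endif

static uint32_t work_dispatch(uint32_t a);

/* All callers go through this pointer; it starts out at the dispatcher. */
static uint32_t (*work_function_p)(uint32_t) = &work_dispatch;

/* Runs once: choose an implementation (never evaluating __builtin_cpu_supports()
 * under Apple clang <= 8), repoint the function pointer, forward the call. */
static uint32_t work_dispatch(uint32_t a) {
#if defined(__i386__) || defined(__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
	if (__builtin_cpu_supports("sse2")) work_function_p = &work_SSE2;
	else
#endif
#endif
	work_function_p = &work_NOSIMD;
	return (*work_function_p)(a);
}

int main(void) {
	printf("%u\n", (unsigned)(*work_function_p)(41)); /* first call dispatches, later calls are direct */
	return 0;
}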

@@ -319,6 +319,7 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis
// determine the available instruction set at runtime and call the correct function
uint32_t *malloc_bitarray_dispatch(uint32_t x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
@@ -329,6 +330,7 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
else
#endif
#endif
malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;
@@ -338,6 +340,7 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
void free_bitarray_dispatch(uint32_t *x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
@@ -348,6 +351,7 @@ void free_bitarray_dispatch(uint32_t *x) {
else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
else
#endif
#endif
free_bitarray_function_p = &free_bitarray_NOSIMD;
@@ -357,6 +361,7 @@ void free_bitarray_dispatch(uint32_t *x) {
uint32_t bitcount_dispatch(uint32_t a) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
@@ -367,6 +372,7 @@ uint32_t bitcount_dispatch(uint32_t a) {
else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
else
#endif
#endif
bitcount_function_p = &bitcount_NOSIMD;
@@ -376,6 +382,7 @@ uint32_t bitcount_dispatch(uint32_t a) {
uint32_t count_states_dispatch(uint32_t *bitarray) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
@@ -386,6 +393,7 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
else
#endif
#endif
count_states_function_p = &count_states_NOSIMD;
@@ -395,6 +403,7 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
@@ -405,6 +414,7 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
else
#endif
#endif
bitarray_AND_function_p = &bitarray_AND_NOSIMD;
@@ -414,6 +424,7 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
@@ -424,6 +435,7 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
else
#endif
#endif
bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;
@@ -433,6 +445,7 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
@@ -443,6 +456,7 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
else
#endif
#endif
count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;
@@ -452,6 +466,7 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
@@ -462,6 +477,7 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
else
#endif
#endif
count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;
@@ -471,6 +487,7 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
@@ -481,6 +498,7 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
else
#endif
#endif
bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;
@@ -490,6 +508,7 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
@@ -500,6 +519,7 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
else
#endif
#endif
bitarray_OR_function_p = &bitarray_OR_NOSIMD;
@@ -509,6 +529,7 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
@@ -519,6 +540,7 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
else
#endif
#endif
count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;
@@ -528,6 +550,7 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
@@ -538,6 +561,7 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
else
#endif
#endif
count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;
@@ -547,6 +571,7 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
@@ -557,6 +582,7 @@ uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uin
else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
else
#endif
#endif
count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;

@@ -35,6 +35,76 @@ void msleep(uint32_t n) {
}
#endif // _WIN32
#ifdef __MACH__
#define CLOCK_MONOTONIC (1)
#define CLOCK_REALTIME (2)
#include <sys/time.h>
#include <mach/clock.h>
#include <mach/mach.h>
#include <mach/mach_time.h>
/* clock_gettime is not implemented on OSX prior to 10.12 */
int _civet_clock_gettime(int clk_id, struct timespec *t);
int _civet_clock_gettime(int clk_id, struct timespec *t)
{
memset(t, 0, sizeof(*t));
if (clk_id == CLOCK_REALTIME) {
struct timeval now;
int rv = gettimeofday(&now, NULL);
if (rv) {
return rv;
}
t->tv_sec = now.tv_sec;
t->tv_nsec = now.tv_usec * 1000;
return 0;
} else if (clk_id == CLOCK_MONOTONIC) {
static uint64_t clock_start_time = 0;
static mach_timebase_info_data_t timebase_ifo = {0, 0};
uint64_t now = mach_absolute_time();
if (clock_start_time == 0) {
mach_timebase_info(&timebase_ifo); // query the timebase once so the scaling below does not divide by zero
clock_start_time = now;
}
now = (uint64_t)((double)(now - clock_start_time)
* (double)timebase_ifo.numer
/ (double)timebase_ifo.denom);
t->tv_sec = now / 1000000000;
t->tv_nsec = now % 1000000000;
return 0;
}
return -1; // EINVAL - Clock ID is unknown
}
/* if clock_gettime is declared, then __CLOCK_AVAILABILITY will be defined */
#ifdef __CLOCK_AVAILABILITY
/* If we compiled against the Mac OS X 10.12 SDK or later, clock_gettime will be declared,
 * but the weak-linked symbol may still be NULL at runtime on older systems, so check before using it. */
int _civet_safe_clock_gettime(int clk_id, struct timespec *t);
int _civet_safe_clock_gettime(int clk_id, struct timespec *t) {
if( clock_gettime ) {
return clock_gettime(clk_id, t);
}
return _civet_clock_gettime(clk_id, t);
}
#define clock_gettime _civet_safe_clock_gettime
#else
#define clock_gettime _civet_clock_gettime
#endif
#endif
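
With this shim in place, ordinary clock_gettime() callers compile unchanged on OS X 10.11: when built against a 10.12-or-later SDK the real (weak-linked) function is used if it exists at runtime, otherwise the gettimeofday()/mach_absolute_time() fallback above takes over. A small usage sketch; the helper function is illustrative, not part of this diff:

#include <stdint.h>
#include <time.h>

/* Illustrative caller: on macOS the clock_gettime() below expands to the
 * _civet_safe_clock_gettime/_civet_clock_gettime wrapper defined above, so it
 * also works on OS X 10.11 where libc provides no clock_gettime(). */
static uint64_t monotonic_ns(void) {
	struct timespec ts;
	if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0)
		return 0;
	return (uint64_t)ts.tv_sec * 1000000000ULL + (uint64_t)ts.tv_nsec;
}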
// a milliseconds timer for performance measurement
uint64_t msclock() {
#if defined(_WIN32)
@@ -47,7 +117,7 @@ uint64_t msclock() {
_ftime(&t);
return 1000 * t.time + t.millitm;
// NORMAL CODE (use _ftime_s)
//struct _timeb t;
//if (_ftime_s(&t)) {
// return 0;