mirror of https://github.com/RfidResearchGroup/proxmark3.git (synced 2025-08-19 21:03:48 -07:00)

commit 66fd6d70a0 (parent 5e4c83cc2f)
hardnested: more readable preprocessing

5 changed files with 81 additions and 130 deletions
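In plain terms: before this change, every SIMD dispatch site repeated the same stack of architecture and compiler checks; the commit evaluates those checks once in the shared header (hardnested_bf_core.h, judging from the new #include added below) and has each site test two derived feature macros instead. Roughly, per site, with the variant bodies elided (the exact nesting varied slightly from site to site; see the hunks below):

    /* before: repeated in front of every dispatch site */
    #if defined (__i386__) || defined (__x86_64__)
    #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
        /* AVX512 path */
    #endif
        /* AVX2 / AVX / SSE2 / MMX paths */
    #endif
    #endif

    /* after: the platform checks live in one place, each site only tests the result */
    #if defined(COMPILER_HAS_SIMD_AVX512)
        /* AVX512 path */
    #endif
    #if defined(COMPILER_HAS_SIMD)
        /* AVX2 / AVX / SSE2 / MMX paths */
    #endif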
@@ -559,19 +559,16 @@ void SetSIMDInstr(SIMDExecInstr instr) {
 static SIMDExecInstr GetSIMDInstr(void) {
     SIMDExecInstr instr = SIMD_NONE;

-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) instr = SIMD_AVX512;
-    else if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
     else if (__builtin_cpu_supports("avx")) instr = SIMD_AVX;
     else if (__builtin_cpu_supports("sse2")) instr = SIMD_SSE2;
     else if (__builtin_cpu_supports("mmx")) instr = SIMD_MMX;
     else
-#endif
 #endif
         instr = SIMD_NONE;

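Note the construction these dispatchers rely on, unchanged in spirit by this commit: the bare else immediately before an #endif carries no statement of its own, so after preprocessing it binds to the if emitted by the next conditional block and the whole thing collapses into one if/else chain. On a build where both macros are defined, the new GetSIMDInstr body reduces to roughly:

    if (__builtin_cpu_supports("avx512f")) instr = SIMD_AVX512;
    else if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
    else if (__builtin_cpu_supports("avx")) instr = SIMD_AVX;
    else if (__builtin_cpu_supports("sse2")) instr = SIMD_SSE2;
    else if (__builtin_cpu_supports("mmx")) instr = SIMD_MMX;
    else instr = SIMD_NONE;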
@@ -589,13 +586,12 @@ SIMDExecInstr GetSIMDInstrAuto(void) {
 // determine the available instruction set at runtime and call the correct function
 uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
     switch (GetSIMDInstrAuto()) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
         case SIMD_AVX512:
             crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
             break;
 #endif
+#if defined(COMPILER_HAS_SIMD)
         case SIMD_AVX2:
             crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
             break;
@@ -608,7 +604,6 @@ uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_byte
         case SIMD_MMX:
             crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
             break;
-#endif
 #endif
         case SIMD_AUTO:
         case SIMD_NONE:
@@ -622,13 +617,12 @@ uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_byte

 void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
     switch (GetSIMDInstrAuto()) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
         case SIMD_AVX512:
             bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
             break;
 #endif
+#if defined(COMPILER_HAS_SIMD)
         case SIMD_AVX2:
             bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
             break;
@@ -641,7 +635,6 @@ void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_t
         case SIMD_MMX:
             bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
             break;
-#endif
 #endif
         case SIMD_AUTO:
         case SIMD_NONE:
@@ -52,19 +52,25 @@ THE SOFTWARE.

 #include "hardnested_bruteforce.h" // statelist_t

+#if ( defined (__i386__) || defined (__x86_64__) ) && \
+    ( !defined(__APPLE__) || \
+      (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1)) )
+# define COMPILER_HAS_SIMD
+# if defined(COMPILER_HAS_SIMD) && ((__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2))
+# define COMPILER_HAS_SIMD_AVX512
+# endif
+#endif
 typedef enum {
     SIMD_AUTO,
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     SIMD_AVX512,
 #endif
+#if defined(COMPILER_HAS_SIMD)
     SIMD_AVX2,
     SIMD_AVX,
     SIMD_SSE2,
     SIMD_MMX,
-#endif
 #endif
     SIMD_NONE,
 } SIMDExecInstr;

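With the header hunk above, COMPILER_HAS_SIMD means "this target and compiler can build the x86 SIMD variants at all", and COMPILER_HAS_SIMD_AVX512 additionally requires a GCC new enough for the AVX512 build. A minimal, hypothetical sketch (not part of the commit) of how a translation unit can key off the consolidated macros after including the header:

    #include "hardnested_bf_core.h"  /* assumed to be the header carrying the macros */

    /* hypothetical helper, for illustration only: report which SIMD variants this
       build can contain at all (runtime CPU detection still happens in the
       *_dispatch functions) */
    static const char *simd_build_support(void) {
    #if defined(COMPILER_HAS_SIMD_AVX512)
        return "x86 SIMD variants up to AVX512";
    #elif defined(COMPILER_HAS_SIMD)
        return "x86 SIMD variants up to AVX2";
    #else
        return "no x86 SIMD variants compiled in";
    #endif
    }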
@@ -17,6 +17,7 @@
 //

 #include "hardnested_bitarray_core.h"
+#include "hardnested_bf_core.h"

 #include <stdint.h>
 #include <stdio.h>
@@ -305,19 +306,16 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis

 // determine the available instruction set at runtime and call the correct function
 uint32_t *malloc_bitarray_dispatch(uint32_t x) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
-    else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
     else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
     else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
     else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
     else
-#endif
 #endif
         malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;

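For context, the pattern all of these hunks edit (sketched here from the surrounding lines; the tail of each dispatcher falls outside the hunks shown, so treat this as a condensed, partly hypothetical illustration): every operation is reached through a function pointer that starts out pointing at its _dispatch variant; the first call picks the best compiled-in implementation, repoints the pointer, and later calls go straight to the chosen implementation.

    #include <stdint.h>
    #include "hardnested_bf_core.h"  /* assumed source of COMPILER_HAS_SIMD */

    typedef uint32_t *malloc_bitarray_t(uint32_t);

    uint32_t *malloc_bitarray_AVX2(uint32_t x);     /* SIMD build of the operation */
    uint32_t *malloc_bitarray_NOSIMD(uint32_t x);   /* plain C fallback            */
    uint32_t *malloc_bitarray_dispatch(uint32_t x); /* one-shot CPU detection      */

    malloc_bitarray_t *malloc_bitarray_function_p = &malloc_bitarray_dispatch;

    uint32_t *malloc_bitarray_dispatch(uint32_t x) {
        /* rebind the pointer once, based on what the CPU actually supports */
    #if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
        else
    #endif
            malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;
        /* then call through the (now rebound) pointer */
        return (*malloc_bitarray_function_p)(x);
    }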
@@ -326,19 +324,16 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 }

 void free_bitarray_dispatch(uint32_t *x) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
-    else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
     else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
     else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
     else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
     else
-#endif
 #endif
         free_bitarray_function_p = &free_bitarray_NOSIMD;

@@ -347,19 +342,16 @@ void free_bitarray_dispatch(uint32_t *x) {
 }

 uint32_t bitcount_dispatch(uint32_t a) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
-    else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
     else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
     else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
     else
-#endif
 #endif
         bitcount_function_p = &bitcount_NOSIMD;

@@ -368,19 +360,16 @@ uint32_t bitcount_dispatch(uint32_t a) {
 }

 uint32_t count_states_dispatch(uint32_t *bitarray) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
     else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
     else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
     else
-#endif
 #endif
         count_states_function_p = &count_states_NOSIMD;

@@ -389,19 +378,16 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
 }

 void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
-    else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
     else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
     else
-#endif
 #endif
         bitarray_AND_function_p = &bitarray_AND_NOSIMD;

@@ -410,19 +396,16 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }

 void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
-    else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
     else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
     else
-#endif
 #endif
         bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;

@@ -431,19 +414,16 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 }

 uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
     else
-#endif
 #endif
         count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;

@@ -452,19 +432,16 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }

 uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
     else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
     else
-#endif
 #endif
         count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;

@@ -473,19 +450,16 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 }

 void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
-    else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
     else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
     else
-#endif
 #endif
         bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;

@@ -494,19 +468,16 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
 }

 void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
-    else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
     else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
     else
-#endif
 #endif
         bitarray_OR_function_p = &bitarray_OR_NOSIMD;

@@ -515,19 +486,16 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
 }

 uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
     else
-#endif
 #endif
         count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;

@@ -536,19 +504,16 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
 }

 uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
     else
-#endif
 #endif
         count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;

@@ -557,19 +522,16 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
 }

 uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
-    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
-#else
-    if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
+    else
 #endif
+#if defined(COMPILER_HAS_SIMD)
+        if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
     else
-#endif
 #endif
         count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;

@@ -163,16 +163,14 @@ static int usage_hf14_hardnested(void) {
     PrintAndLogEx(NORMAL, " f <name> read/write <name> instead of default name");
     PrintAndLogEx(NORMAL, " t tests?");
     PrintAndLogEx(NORMAL, " i <X> set type of SIMD instructions. Without this flag programs autodetect it.");
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     PrintAndLogEx(NORMAL, " i 5 = AVX512");
 #endif
+#if defined(COMPILER_HAS_SIMD)
     PrintAndLogEx(NORMAL, " i 2 = AVX2");
     PrintAndLogEx(NORMAL, " i a = AVX");
     PrintAndLogEx(NORMAL, " i s = SSE2");
     PrintAndLogEx(NORMAL, " i m = MMX");
-#endif
 #endif
     PrintAndLogEx(NORMAL, " i n = none (use CPU regular instruction set)");
     PrintAndLogEx(NORMAL, "");
@@ -228,15 +226,13 @@ static int usage_hf14_autopwn(void) {
     PrintAndLogEx(NORMAL, " * 2 = 2k");
     PrintAndLogEx(NORMAL, " * 4 = 4k");
     PrintAndLogEx(NORMAL, " i <simd type> set type of SIMD instructions for hardnested. Default: autodetection.");
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
     PrintAndLogEx(NORMAL, " i 5 = AVX512");
 #endif
+#if defined(COMPILER_HAS_SIMD)
     PrintAndLogEx(NORMAL, " i 2 = AVX2");
     PrintAndLogEx(NORMAL, " i a = AVX");
     PrintAndLogEx(NORMAL, " i s = SSE2");
-#endif
 #endif
     PrintAndLogEx(NORMAL, " i m = MMX");
     PrintAndLogEx(NORMAL, " i n = none (use CPU regular instruction set)");
@@ -1817,13 +1813,12 @@ static int CmdHF14AMfNestedHard(const char *Cmd) {
         SetSIMDInstr(SIMD_AUTO);
         ctmp = tolower(param_getchar(Cmd, cmdp + 1));
         switch (ctmp) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
             case '5':
                 SetSIMDInstr(SIMD_AVX512);
                 break;
 #endif
+#if defined(COMPILER_HAS_SIMD)
             case '2':
                 SetSIMDInstr(SIMD_AVX2);
                 break;
@@ -1836,7 +1831,6 @@ static int CmdHF14AMfNestedHard(const char *Cmd) {
             case 'm':
                 SetSIMDInstr(SIMD_MMX);
                 break;
-#endif
 #endif
             case 'n':
                 SetSIMDInstr(SIMD_NONE);
@@ -2004,13 +1998,12 @@ static int CmdHF14AMfAutoPWN(const char *Cmd) {
         SetSIMDInstr(SIMD_AUTO);
         ctmp = tolower(param_getchar(Cmd, cmdp + 1));
         switch (ctmp) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
             case '5':
                 SetSIMDInstr(SIMD_AVX512);
                 break;
 #endif
+#if defined(COMPILER_HAS_SIMD)
             case '2':
                 SetSIMDInstr(SIMD_AVX2);
                 break;
@@ -2023,7 +2016,6 @@ static int CmdHF14AMfAutoPWN(const char *Cmd) {
             case 'm':
                 SetSIMDInstr(SIMD_MMX);
                 break;
-#endif
 #endif
             case 'n':
                 SetSIMDInstr(SIMD_NONE);
@@ -75,13 +75,12 @@ static float brute_force_per_second;

 static void get_SIMD_instruction_set(char *instruction_set) {
     switch (GetSIMDInstrAuto()) {
-#if defined (__i386__) || defined (__x86_64__)
-#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
-#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined(COMPILER_HAS_SIMD_AVX512)
         case SIMD_AVX512:
             strcpy(instruction_set, "AVX512F");
             break;
 #endif
+#if defined(COMPILER_HAS_SIMD)
         case SIMD_AVX2:
             strcpy(instruction_set, "AVX2");
             break;
@@ -94,7 +93,6 @@ static void get_SIMD_instruction_set(char *instruction_set) {
         case SIMD_MMX:
             strcpy(instruction_set, "MMX");
             break;
-#endif
 #endif
         case SIMD_AUTO:
         case SIMD_NONE: