mirror of https://github.com/Proxmark/proxmark3.git
synced 2025-07-07 05:31:17 -07:00

make hardnested compile on gcc 4.9.2
plus add Td to gitignore.

This commit is contained in:
parent 8224307338
commit f950ce1cb9

4 changed files with 65 additions and 0 deletions
.gitignore (vendored): 1 addition
@@ -18,6 +18,7 @@
 usb_cmd.lua
 version.c
 client/ui/ui_overlays.h
+*.Td
 
 *.exe
 hardnested_stats.txt

@@ -69,8 +69,12 @@ static float brute_force_per_second;
 
 
 static void get_SIMD_instruction_set(char* instruction_set) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) strcpy(instruction_set, "AVX512F");
 	else if (__builtin_cpu_supports("avx2")) strcpy(instruction_set, "AVX2");
+#else
+	if (__builtin_cpu_supports("avx2")) strcpy(instruction_set, "AVX2");
+#endif
 	else if (__builtin_cpu_supports("avx")) strcpy(instruction_set, "AVX");
 	else if (__builtin_cpu_supports("sse2")) strcpy(instruction_set, "SSE2");
 	else if (__builtin_cpu_supports("mmx")) strcpy(instruction_set, "MMX");

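The four added lines above are the whole fix, repeated in every hunk of this commit. The likely reason (my reading of the change, not stated in the commit message) is that gcc 4.9.2 does not recognise "avx512f" as a feature name for __builtin_cpu_supports(), so the AVX512F probe only compiles on GCC 5 and later; on older compilers the chain simply starts at the AVX2 test. A minimal, self-contained sketch of the pattern with illustrative names (not the actual Proxmark3 code):

#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for the guarded SIMD probe added in this commit.
 * Assumption: __builtin_cpu_supports("avx512f") fails to compile on GCC <= 4,
 * so that branch is only emitted for newer compilers. */
static void get_simd_name(char *out) {
#if defined(__GNUC__) && (__GNUC__ > 4)
    if (__builtin_cpu_supports("avx512f"))   strcpy(out, "AVX512F");
    else if (__builtin_cpu_supports("avx2")) strcpy(out, "AVX2");
#else
    if (__builtin_cpu_supports("avx2"))      strcpy(out, "AVX2");
#endif
    else if (__builtin_cpu_supports("avx"))  strcpy(out, "AVX");
    else if (__builtin_cpu_supports("sse2")) strcpy(out, "SSE2");
    else if (__builtin_cpu_supports("mmx")) strcpy(out, "MMX");
    else                                     strcpy(out, "no SIMD");
}

int main(void) {
    char name[16];
    get_simd_name(name);
    printf("best supported instruction set: %s\n", name);
    return 0;
}

Note the slightly unusual layout: the else-if chain after #endif attaches to whichever leading if survived preprocessing, which is what lets the fix add four lines without rewriting any of the existing branches.
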
@@ -534,8 +534,12 @@ bitslice_test_nonces_t *bitslice_test_nonces_function_p = &bitslice_test_nonces_
 
 // determine the available instruction set at runtime and call the correct function
 const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
 	else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
 	else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
 	else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;

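For context on how these dispatch routines are used: the hunk headers show that each function pointer is initialised to its _dispatch twin (for example bitslice_test_nonces_function_p = &bitslice_test_nonces_...). The hunks only show the selection step, but the usual shape of this self-replacing dispatch, sketched below with made-up names rather than the real Proxmark3 symbols, is that the first call through the pointer probes the CPU once, swaps the pointer to the chosen SIMD build of the routine, and finishes the call; every later call then goes straight to that implementation.

#include <stdint.h>
#include <stdio.h>

static uint32_t popcount32_generic(uint32_t a) {
    return (uint32_t)__builtin_popcount(a);               /* portable fallback */
}

static uint32_t popcount32_dispatch(uint32_t a);           /* forward declaration */

/* Starts out pointing at the dispatcher, mirroring the initialisers visible
 * in the hunk headers above. */
static uint32_t (*popcount32_function_p)(uint32_t) = &popcount32_dispatch;

static uint32_t popcount32_dispatch(uint32_t a) {
    /* The real code tests __builtin_cpu_supports("avx512f"), "avx2", ... and
     * picks an AVX512/AVX2/SSE2/MMX variant; the fallback stands in here. */
    popcount32_function_p = &popcount32_generic;
    return (*popcount32_function_p)(a);                     /* finish the first call */
}

int main(void) {
    printf("%u\n", (*popcount32_function_p)(0xF0F0F0F0u));  /* first call: dispatch */
    printf("%u\n", (*popcount32_function_p)(0x00000007u));  /* later calls: direct */
    return 0;
}

The point of the pattern is that the CPU feature probe runs once per routine rather than on every call, so the compile-time guard in this commit only has to touch the probe, not the hot path.
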
@@ -548,8 +552,12 @@ const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_firs
 }
 
 void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;

@@ -304,8 +304,12 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis
 
 // determine the available instruction set at runtime and call the correct function
 uint32_t *malloc_bitarray_dispatch(uint32_t x) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
 	else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
 	else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
 	else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;

@@ -318,8 +322,12 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 }
 
 void free_bitarray_dispatch(uint32_t *x) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
 	else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
 	else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
 	else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;

@@ -332,8 +340,12 @@ void free_bitarray_dispatch(uint32_t *x) {
 }
 
 uint32_t bitcount_dispatch(uint32_t a) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;

@@ -346,8 +358,12 @@ uint32_t bitcount_dispatch(uint32_t a) {
 }
 
 uint32_t count_states_dispatch(uint32_t *bitarray) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;

@@ -360,8 +376,12 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
 }
 
 void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;

@@ -374,8 +394,12 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;

@@ -388,8 +412,12 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;

@@ -402,8 +430,12 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;

@@ -416,8 +448,12 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;

@@ -430,8 +466,12 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
 }
 
 void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;

@@ -444,8 +484,12 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;

@@ -458,8 +502,12 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;

@@ -472,8 +520,12 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
 }
 
 uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
+#if (__GNUC__ > 4)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
+#else
+	if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
+#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;