make style

Philippe Teuwen 2020-05-14 01:04:26 +02:00
commit ffdd338dea
8 changed files with 132 additions and 131 deletions

View file

@@ -1398,14 +1398,14 @@ void ReaderHitag(hitag_function htf, hitag_data *htd) {
    // Tag specific configuration settings (sof, timings, etc.)
    // TODO HTS
    /* if (htf <= HTS_LAST_CMD) {
        // hitagS settings
        t_wait_1 = 204;
        t_wait_2 = 128;
        flipped_bit = 0;
        tag_size = 8;
        DBG DbpString("Configured for hitagS reader");
    } else */
    if (htf <= HT1_LAST_CMD) {
        // hitag1 settings
        t_wait_1 = 204;
@@ -1724,24 +1724,24 @@ void WriterHitag(hitag_function htf, hitag_data *htd, int page) {
    // Tag specific configuration settings (sof, timings, etc.)
    // TODO HTS
    /* if (htf <= HTS_LAST_CMD) {
        // hitagS settings
        t_wait_1 = 204;
        t_wait_2 = 128;
        //tag_size = 256;
        flipped_bit = 0;
        tag_size = 8;
        DbpString("Configured for hitagS writer");
    } else */
    // TODO HT1
    /* if (htf <= HT1_LAST_CMD) {
        // hitag1 settings
        t_wait_1 = 204;
        t_wait_2 = 128;
        tag_size = 256;
        flipped_bit = 0;
        DbpString("Configured for hitag1 writer");
    } else */
    // if (htf <= HT2_LAST_CMD) {
    // hitag2 settings
    t_wait_1 = HITAG_T_WAIT_1_MIN;
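Note: a minimal standalone sketch of the timing selection logic above, with illustrative constant values standing in for the firmware's real definitions (HT1_LAST_CMD, HT2_LAST_CMD and HITAG_T_WAIT_1_MIN are assumed here):

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins; the real values live in the Proxmark3 firmware headers. */
#define HT1_LAST_CMD       10
#define HT2_LAST_CMD       20
#define HITAG_T_WAIT_1_MIN 90

typedef int hitag_function;

/* Pick wait timings based on which protocol family the requested command belongs to. */
static void configure_timings(hitag_function htf, int *t_wait_1, int *t_wait_2) {
    if (htf <= HT1_LAST_CMD) {          /* hitag1 settings */
        *t_wait_1 = 204;
        *t_wait_2 = 128;
    } else if (htf <= HT2_LAST_CMD) {   /* hitag2 settings */
        *t_wait_1 = HITAG_T_WAIT_1_MIN;
        *t_wait_2 = 128;                /* assumed default for this sketch */
    }
}

int main(void) {
    int t1 = 0, t2 = 0;
    configure_timings(15, &t1, &t2);    /* a command in the hitag2 range */
    printf("t_wait_1=%d t_wait_2=%d\n", t1, t2);
    return 0;
}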

View file

@@ -1715,7 +1715,7 @@ void MifareChkKeys(uint8_t *datain) {
    bool clearTrace = datain[2];
    uint16_t key_count = (datain[3] << 8) | datain[4];
-   uint16_t key_mem_available = MIN( (PM3_CMD_DATA_SIZE - 5) , key_count * 6);
+   uint16_t key_mem_available = MIN((PM3_CMD_DATA_SIZE - 5), key_count * 6);
    key_count = key_mem_available / 6;
    datain += 5;
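Note: the MIN() line above caps the candidate keys to what fits in one command frame: 5 header bytes are consumed, each MIFARE key is 6 bytes, and key_count is re-derived from the clamped byte budget. A self-contained sketch of that arithmetic (the PM3_CMD_DATA_SIZE value is assumed for illustration):

#include <stdint.h>
#include <stdio.h>

#define PM3_CMD_DATA_SIZE 512                    /* assumed payload size for this sketch */
#define MIN(a, b) ((a) < (b) ? (a) : (b))

int main(void) {
    uint16_t requested = 200;                                                 /* keys asked for */
    uint16_t key_mem_available = MIN((PM3_CMD_DATA_SIZE - 5), requested * 6); /* clamp to frame */
    uint16_t key_count = key_mem_available / 6;                               /* keys that fit */
    printf("requested=%u usable=%u\n", requested, key_count);                 /* 200 -> 84 here */
    return 0;
}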

View file

@@ -564,13 +564,13 @@ static SIMDExecInstr GetSIMDInstr(void) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
        else if (__builtin_cpu_supports("avx")) instr = SIMD_AVX;
        else if (__builtin_cpu_supports("sse2")) instr = SIMD_SSE2;
        else if (__builtin_cpu_supports("mmx")) instr = SIMD_MMX;
        else
#endif
            instr = SIMD_NONE;
    return instr;
}
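Note: the function above probes the CPU at run time and steps down to the widest SIMD level it reports. A minimal standalone version of the same __builtin_cpu_supports() cascade (GCC/Clang on x86; the enum names here are chosen for the sketch):

#include <stdio.h>

typedef enum { SIMD_NONE, SIMD_MMX, SIMD_SSE2, SIMD_AVX, SIMD_AVX2 } simd_t;

/* Return the best SIMD level the running CPU advertises. */
static simd_t detect_simd(void) {
#if defined(__x86_64__) || defined(__i386__)
    if (__builtin_cpu_supports("avx2")) return SIMD_AVX2;
    if (__builtin_cpu_supports("avx"))  return SIMD_AVX;
    if (__builtin_cpu_supports("sse2")) return SIMD_SSE2;
    if (__builtin_cpu_supports("mmx"))  return SIMD_MMX;
#endif
    return SIMD_NONE;
}

int main(void) {
    static const char *names[] = { "none", "mmx", "sse2", "avx", "avx2" };
    printf("best SIMD level: %s\n", names[detect_simd()]);
    return 0;
}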

View file

@@ -311,13 +311,13 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
        else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
        else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
        else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
        else
#endif
            malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;
    // call the most optimized function for this CPU
    return (*malloc_bitarray_function_p)(x);

@@ -329,13 +329,13 @@ void free_bitarray_dispatch(uint32_t *x) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
        else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
        else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
        else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
        else
#endif
            free_bitarray_function_p = &free_bitarray_NOSIMD;
    // call the most optimized function for this CPU
    (*free_bitarray_function_p)(x);

@@ -347,13 +347,13 @@ uint32_t bitcount_dispatch(uint32_t a) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
        else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
        else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
        else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
        else
#endif
            bitcount_function_p = &bitcount_NOSIMD;
    // call the most optimized function for this CPU
    return (*bitcount_function_p)(a);

@@ -365,13 +365,13 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
        else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
        else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
        else
#endif
            count_states_function_p = &count_states_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_states_function_p)(bitarray);

@@ -383,13 +383,13 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
        else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
        else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
        else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
        else
#endif
            bitarray_AND_function_p = &bitarray_AND_NOSIMD;
    // call the most optimized function for this CPU
    (*bitarray_AND_function_p)(A, B);

@@ -401,13 +401,13 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
        else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
        else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
        else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
        else
#endif
            bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;
    // call the most optimized function for this CPU
    (*bitarray_low20_AND_function_p)(A, B);

@@ -419,13 +419,13 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
        else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
        else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
        else
#endif
            count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_bitarray_AND_function_p)(A, B);

@@ -437,13 +437,13 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
        else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
        else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
        else
#endif
            count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_bitarray_low20_AND_function_p)(A, B);

@@ -455,13 +455,13 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
        else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
        else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
        else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
        else
#endif
            bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;
    // call the most optimized function for this CPU
    (*bitarray_AND4_function_p)(A, B, C, D);

@@ -473,13 +473,13 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
        else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
        else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
        else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
        else
#endif
            bitarray_OR_function_p = &bitarray_OR_NOSIMD;
    // call the most optimized function for this CPU
    (*bitarray_OR_function_p)(A, B);

@@ -491,13 +491,13 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
        else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
        else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
        else
#endif
            count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_bitarray_AND2_function_p)(A, B);

@@ -509,13 +509,13 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
        else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
        else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
        else
#endif
            count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_bitarray_AND3_function_p)(A, B, C);

@@ -527,13 +527,13 @@ uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    else
#endif
#if defined(COMPILER_HAS_SIMD)
        if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
        else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
        else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
        else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
        else
#endif
            count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;
    // call the most optimized function for this CPU
    return (*count_bitarray_AND4_function_p)(A, B, C, D);
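Note: every dispatcher in this file follows the same shape — pick the best implementation via a function pointer, then forward the arguments. A hedged generic sketch of that pattern (the implementation names below are placeholders, not the real hardnested symbols):

#include <stdint.h>
#include <stdio.h>

/* Two interchangeable implementations, standing in for the _AVX2 ... _NOSIMD variants. */
static uint32_t popcount_hw(uint32_t a)    { return (uint32_t)__builtin_popcount(a); }
static uint32_t popcount_plain(uint32_t a) { uint32_t n = 0; while (a) { n += a & 1; a >>= 1; } return n; }

static uint32_t (*popcount_function_p)(uint32_t);   /* dispatch target */

static uint32_t popcount_dispatch(uint32_t a) {
#if defined(__x86_64__) || defined(__i386__)
    if (__builtin_cpu_supports("sse2")) popcount_function_p = &popcount_hw;
    else
#endif
        popcount_function_p = &popcount_plain;
    // call the selected implementation
    return (*popcount_function_p)(a);
}

int main(void) {
    printf("bits set in 0xF0F0F0F0: %u\n", popcount_dispatch(0xF0F0F0F0u));   /* prints 16 */
    return 0;
}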

View file

@@ -76,10 +76,10 @@ static int usage_lf_nexwatch_sim(void) {
// scramble parity (1234) -> (4231)
static uint8_t nexwatch_parity_swap(uint8_t parity) {
-   uint8_t a = (((parity >> 3 ) & 1) );
-   a |= (((parity >> 1 ) & 1) << 1);
-   a |= (((parity >> 2 ) & 1) << 2);
+   uint8_t a = (((parity >> 3) & 1));
+   a |= (((parity >> 1) & 1) << 1);
+   a |= (((parity >> 2) & 1) << 2);
    a |= ((parity & 1) << 3);
    return a;
}
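Note: nexwatch_parity_swap() above moves bit 3 to bit 0 and bit 0 to bit 3 while leaving bits 1 and 2 in place, which is what the "(1234) -> (4231)" comment describes. A small self-checking sketch of that mapping:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Same bit moves as nexwatch_parity_swap: swap bits 3 and 0, keep bits 1 and 2. */
static uint8_t parity_swap(uint8_t parity) {
    uint8_t a = (parity >> 3) & 1;        /* old bit 3 -> new bit 0 */
    a |= ((parity >> 1) & 1) << 1;        /* bit 1 stays */
    a |= ((parity >> 2) & 1) << 2;        /* bit 2 stays */
    a |= (parity & 1) << 3;               /* old bit 0 -> new bit 3 */
    return a;
}

int main(void) {
    assert(parity_swap(0x8) == 0x1);      /* 1000 -> 0001 */
    assert(parity_swap(0x1) == 0x8);      /* 0001 -> 1000 */
    assert(parity_swap(0x6) == 0x6);      /* 0110 unchanged */
    printf("all parity swap checks passed\n");
    return 0;
}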
// parity check
@@ -119,7 +119,7 @@ static int nexwatch_scamble(NexWatchScramble_t action, uint32_t *id, uint32_t *s
        28, 24, 20, 16, 12, 8, 4, 0
    };
-   switch(action) {
+   switch (action) {
        case DESCRAMBLE: {
            *id = 0;
            for (uint8_t idx = 0; idx < 32; idx++) {
@@ -144,7 +144,8 @@ static int nexwatch_scamble(NexWatchScramble_t action, uint32_t *id, uint32_t *s
            }
            break;
        }
-       default: break;
+       default:
+           break;
    }
    return PM3_SUCCESS;
}
@@ -227,7 +228,7 @@ int demodNexWatch(void) {
    nexwatch_magic_t items[] = { {0xBE, "Quadrakey", 0}, {0x88, "Nexkey", 0} };
    uint8_t m_idx;
-   for ( m_idx = 0; m_idx < ARRAYLEN(items); m_idx++) {
+   for (m_idx = 0; m_idx < ARRAYLEN(items); m_idx++) {
        items[m_idx].chk = nexwatch_checksum(items[m_idx].magic, cn, calc_parity);
        if (items[m_idx].chk == chk) {
@@ -236,14 +237,14 @@ int demodNexWatch(void) {
    }
    // output
-   PrintAndLogEx(SUCCESS, " NexWatch raw id : " _YELLOW_("0x%"PRIx32) , rawid);
+   PrintAndLogEx(SUCCESS, " NexWatch raw id : " _YELLOW_("0x%"PRIx32), rawid);
    if (m_idx < ARRAYLEN(items)) {
        PrintAndLogEx(SUCCESS, " fingerprint : " _GREEN_("%s"), items[m_idx].desc);
    }
    PrintAndLogEx(SUCCESS, " 88bit id : " _YELLOW_("%"PRIu32) " (" _YELLOW_("0x%"PRIx32)")", cn, cn);
    PrintAndLogEx(SUCCESS, " mode : %x", mode);
-   if ( parity == calc_parity) {
+   if (parity == calc_parity) {
        PrintAndLogEx(SUCCESS, " parity : %s (0x%X)", _GREEN_("ok"), parity);
    } else {
        PrintAndLogEx(WARNING, " parity : %s (0x%X != 0x%X)", _RED_("fail"), parity, calc_parity);
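Note: the demod output above tries each known vendor "magic" byte (0xBE Quadrakey, 0x88 Nexkey), recomputes the checksum for the decoded card number and parity, and reports the first match as the fingerprint. A hedged standalone sketch of that matching step, with a dummy checksum standing in for the real nexwatch_checksum():

#include <stdint.h>
#include <stdio.h>

#define ARRAYLEN(x) (sizeof(x) / sizeof((x)[0]))

typedef struct { uint8_t magic; const char *desc; uint8_t chk; } nexwatch_magic_t;

/* Dummy stand-in for nexwatch_checksum(); only the matching flow is illustrated. */
static uint8_t fake_checksum(uint8_t magic, uint32_t id, uint8_t parity) {
    return (uint8_t)(magic ^ (id & 0xFF) ^ parity);
}

int main(void) {
    uint32_t cn = 0x12345678;                            /* decoded card number */
    uint8_t calc_parity = 0x5;
    uint8_t chk = fake_checksum(0x88, cn, calc_parity);  /* pretend the tag carries a Nexkey checksum */

    nexwatch_magic_t items[] = { {0xBE, "Quadrakey", 0}, {0x88, "Nexkey", 0} };
    uint8_t m_idx;
    for (m_idx = 0; m_idx < ARRAYLEN(items); m_idx++) {
        items[m_idx].chk = fake_checksum(items[m_idx].magic, cn, calc_parity);
        if (items[m_idx].chk == chk) {
            break;                                       /* first magic whose checksum matches wins */
        }
    }
    if (m_idx < ARRAYLEN(items)) {
        printf("fingerprint: %s\n", items[m_idx].desc);  /* prints "Nexkey" here */
    } else {
        printf("no match\n");
    }
    return 0;
}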

View file

@@ -551,7 +551,7 @@ bool OpenProxmark(void *port, bool wait_for_port, int timeout, bool flash_mode,
    do {
        sp = uart_open(portname, speed);
        msleep(500);
-       PrintAndLogEx(INPLACE, "% 3i", timeout - openCount -1);
+       PrintAndLogEx(INPLACE, "% 3i", timeout - openCount - 1);
    } while (++openCount < timeout && (sp == INVALID_SERIAL_PORT || sp == CLAIMED_SERIAL_PORT));
}
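Note: the loop above retries opening the serial port about twice per second, counting down in place until uart_open() succeeds or the timeout is exhausted. A generic sketch of that retry pattern with the uart layer stubbed out (only the control flow mirrors the source):

#include <stdbool.h>
#include <stdio.h>
#include <unistd.h>

/* Stub standing in for uart_open(); pretends the port becomes available on the 4th attempt. */
static bool try_open(const char *portname, int attempt) {
    (void)portname;
    return attempt >= 3;
}

int main(void) {
    const char *portname = "/dev/ttyACM0";    /* example port name */
    int timeout = 10;                         /* maximum attempts */
    int openCount = 0;
    bool opened = false;

    do {
        opened = try_open(portname, openCount);
        usleep(500 * 1000);                   /* msleep(500) equivalent */
        printf("\r% 3i", timeout - openCount - 1);   /* in-place countdown */
        fflush(stdout);
    } while (++openCount < timeout && !opened);

    printf("\nport %s after %d attempt(s)\n", opened ? "opened" : "not opened", openCount);
    return 0;
}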