Mirror of https://github.com/RfidResearchGroup/proxmark3.git, synced 2025-08-22 22:33:48 -07:00

emrtd info: Split EF_SOD into a separate section (only on offline)

parent b0f122528d
commit 2679ea0980

1 changed file with 42 additions and 24 deletions
@@ -1689,8 +1689,8 @@ int infoHF_EMRTD(char *documentnumber, char *dob, char *expiry, bool BAC_availab
     }
 
     // Grab the hash list
-    uint8_t dg_hashes[16][64];
-    uint8_t hash_out[64];
+    uint8_t dg_hashes_sod[17][64] = { 0x00 };
+    uint8_t dg_hashes_calc[17][64] = { 0x00 };
     int hash_algo = 0;
 
     if (!emrtd_select_and_read(response, &resplen, dg_table[EF_SOD].fileid, ks_enc, ks_mac, ssc, BAC, use_14b)) {
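
A note on the new declarations: both tables grow from 16 to 17 rows so EF_DG1 through EF_DG16 can be indexed directly by data-group number (row 0 simply goes unused), and each 64-byte row is wide enough for the longest digest in hashalg_table (64 bytes would fit SHA-512). The explicit zero-initialisation matters later: an all-zero row is read as "no hash recorded". A minimal standalone sketch of the indexing convention (illustrative, not Proxmark3 code):

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        // 17 rows: EF_DG1..EF_DG16 index directly by number; row 0 is unused.
        uint8_t dg_hashes_sod[17][64] = { 0x00 };

        int dgnum = 14;
        dg_hashes_sod[dgnum][0] = 0xAB; // no "dgnum - 1" bookkeeping needed
        printf("DG%d row starts with %02X\n", dgnum, dg_hashes_sod[dgnum][0]);
        return 0;
    }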
@@ -1699,7 +1699,7 @@ int infoHF_EMRTD(char *documentnumber, char *dob, char *expiry, bool BAC_availab
         return PM3_ESOFT;
     }
 
-    res = emrtd_parse_ef_sod_hashes(response, resplen, *dg_hashes, &hash_algo);
+    res = emrtd_parse_ef_sod_hashes(response, resplen, *dg_hashes_sod, &hash_algo);
     if (res != PM3_SUCCESS) {
         PrintAndLogEx(ERR, "Failed to read hash list from EF_SOD. Hash checks will fail.");
     }
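
The `*dg_hashes_sod` argument deserves a second look: dereferencing the 2-D array yields its first row, a uint8_t[64], which decays to a plain uint8_t * aimed at the start of the contiguous 17 x 64 block. Presumably emrtd_parse_ef_sod_hashes accepts a flat pointer and writes each digest at offset 64 * dgnum; that is inferred from this call site, not quoted from the function's signature. A self-contained illustration of the layout:

    #include <assert.h>
    #include <stdint.h>

    int main(void) {
        uint8_t dg_hashes_sod[17][64] = { 0x00 };

        // *dg_hashes_sod is dg_hashes_sod[0], which decays to uint8_t *.
        uint8_t *flat = *dg_hashes_sod;

        // Rows are contiguous, so row n begins at byte offset 64 * n.
        assert(flat + 64 * 5 == dg_hashes_sod[5]);
        return 0;
    }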
@@ -1719,15 +1719,9 @@ int infoHF_EMRTD(char *documentnumber, char *dob, char *expiry, bool BAC_availab
         PrintAndLogEx(DEBUG, "EF_DG%i hash algo: %i", dg->dgnum, hash_algo);
         // Check file hash
         if (hash_algo != -1) {
-            PrintAndLogEx(DEBUG, "EF_DG%i hash on EF_SOD: %s", dg->dgnum, sprint_hex_inrow(dg_hashes[dg->dgnum], hashalg_table[hash_algo].hashlen));
-            hashalg_table[hash_algo].hasher(response, resplen, hash_out);
-            PrintAndLogEx(DEBUG, "EF_DG%i hash calc: %s", dg->dgnum, sprint_hex_inrow(hash_out, hashalg_table[hash_algo].hashlen));
-
-            if (memcmp(dg_hashes[dg->dgnum], hash_out, hashalg_table[hash_algo].hashlen) == 0) {
-                PrintAndLogEx(SUCCESS, _GREEN_("Hash verification passed for EF_DG%i."), dg->dgnum);
-            } else {
-                PrintAndLogEx(ERR, _RED_("Hash verification failed for EF_DG%i."), dg->dgnum);
-            }
+            PrintAndLogEx(DEBUG, "EF_DG%i hash on EF_SOD: %s", dg->dgnum, sprint_hex_inrow(dg_hashes_sod[dg->dgnum], hashalg_table[hash_algo].hashlen));
+            hashalg_table[hash_algo].hasher(response, resplen, dg_hashes_calc[dg->dgnum]);
+            PrintAndLogEx(DEBUG, "EF_DG%i hash calc: %s", dg->dgnum, sprint_hex_inrow(dg_hashes_calc[dg->dgnum], hashalg_table[hash_algo].hashlen));
         }
     }
 }
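
Two things change in this loop body. First, the locally computed digest now lands in dg_hashes_calc[dg->dgnum] instead of the scratch hash_out buffer, so it survives past the loop. Second, the inline pass/fail prints are dropped; per the "(only on offline)" in the commit title, only infoHF_EMRTD_offline gains the replacement EF_SOD summary below, leaving this online path with debug output only. The hasher call dispatches through hashalg_table, defined elsewhere in the file; a simplified model of that pattern (field and type names are guesses based on the .name, .hashlen, and .hasher uses visible in this diff):

    #include <stddef.h>
    #include <stdint.h>

    // Rough model of a hash-dispatch table entry: a display name, the
    // digest length, and a function pointer that hashes data into out.
    typedef struct {
        const char *name;
        size_t hashlen;
        void (*hasher)(const uint8_t *data, size_t len, uint8_t *out);
    } hashalg_entry_t;

    // Toy one-byte "digest" standing in for a real hash function.
    static void toy_xor_hash(const uint8_t *data, size_t len, uint8_t *out) {
        out[0] = 0;
        for (size_t i = 0; i < len; i++)
            out[0] ^= data[i];
    }

    static const hashalg_entry_t hashalg_demo[] = {
        { "toy-xor", 1, toy_xor_hash },
    };

With such a table, hashalg_demo[algo].hasher(buf, buflen, row) fills a row using whichever algorithm EF_SOD declared, and hashalg_demo[algo].hashlen bounds every later memcmp.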
@@ -1771,8 +1765,8 @@ int infoHF_EMRTD_offline(const char *path) {
     free(data);
 
     // Grab the hash list
-    uint8_t dg_hashes[16][64];
-    uint8_t hash_out[64];
+    uint8_t dg_hashes_sod[17][64] = { 0x00 };
+    uint8_t dg_hashes_calc[17][64] = { 0x00 };
     int hash_algo = 0;
 
     strcpy(filepath, path);
@@ -1785,7 +1779,7 @@ int infoHF_EMRTD_offline(const char *path) {
         return PM3_ESOFT;
     }
 
-    res = emrtd_parse_ef_sod_hashes(data, datalen, *dg_hashes, &hash_algo);
+    res = emrtd_parse_ef_sod_hashes(data, datalen, *dg_hashes_sod, &hash_algo);
    if (res != PM3_SUCCESS) {
         PrintAndLogEx(ERR, "Failed to read hash list from EF_SOD. Hash checks will fail.");
     }
@@ -1810,21 +1804,45 @@ int infoHF_EMRTD_offline(const char *path) {
                 PrintAndLogEx(DEBUG, "EF_DG%i hash algo: %i", dg->dgnum, hash_algo);
                 // Check file hash
                 if (hash_algo != -1) {
-                    PrintAndLogEx(DEBUG, "EF_DG%i hash on EF_SOD: %s", dg->dgnum, sprint_hex_inrow(dg_hashes[dg->dgnum], hashalg_table[hash_algo].hashlen));
-                    hashalg_table[hash_algo].hasher(data, datalen, hash_out);
-                    PrintAndLogEx(DEBUG, "EF_DG%i hash calc: %s", dg->dgnum, sprint_hex_inrow(hash_out, hashalg_table[hash_algo].hashlen));
-
-                    if (memcmp(dg_hashes[dg->dgnum], hash_out, hashalg_table[hash_algo].hashlen) == 0) {
-                        PrintAndLogEx(SUCCESS, _GREEN_("Hash verification passed for EF_DG%i."), dg->dgnum);
-                    } else {
-                        PrintAndLogEx(ERR, _RED_("Hash verification failed for EF_DG%i."), dg->dgnum);
-                    }
+                    PrintAndLogEx(DEBUG, "EF_DG%i hash on EF_SOD: %s", dg->dgnum, sprint_hex_inrow(dg_hashes_sod[dg->dgnum], hashalg_table[hash_algo].hashlen));
+                    hashalg_table[hash_algo].hasher(data, datalen, dg_hashes_calc[dg->dgnum]);
+                    PrintAndLogEx(DEBUG, "EF_DG%i hash calc: %s", dg->dgnum, sprint_hex_inrow(dg_hashes_calc[dg->dgnum], hashalg_table[hash_algo].hashlen));
                 }
                 free(data);
             }
         }
     }
     free(filepath);
 
+    PrintAndLogEx(NORMAL, "");
+    PrintAndLogEx(INFO, "-------------------- " _CYAN_("EF_SOD") " --------------------");
+
+    if (hash_algo == -1) {
+        PrintAndLogEx(SUCCESS, "Hash algorithm: " _YELLOW_("Unknown"));
+    } else {
+        PrintAndLogEx(SUCCESS, "Hash algorithm: " _YELLOW_("%s"), hashalg_table[hash_algo].name);
+
+        uint8_t all_zeroes[64] = { 0x00 };
+        bool calc_all_zero, sod_all_zero, hash_matches;
+        for (int i = 1; i <= 16; i++) {
+            calc_all_zero = (memcmp(dg_hashes_calc[i], all_zeroes, hashalg_table[hash_algo].hashlen) == 0);
+            sod_all_zero = (memcmp(dg_hashes_sod[i], all_zeroes, hashalg_table[hash_algo].hashlen) == 0);
+            hash_matches = (memcmp(dg_hashes_sod[i], dg_hashes_calc[i], hashalg_table[hash_algo].hashlen) == 0);
+
+            // Ignore files we haven't read and have no hashes for
+            if (calc_all_zero == true && sod_all_zero == true) {
+                continue;
+            } else if (calc_all_zero == true) {
+                PrintAndLogEx(SUCCESS, "EF_DG%i: " _YELLOW_("File read access denied, but is in EF_SOD"), i);
+            } else if (sod_all_zero == true) {
+                PrintAndLogEx(SUCCESS, "EF_DG%i: " _YELLOW_("File not in EF_SOD"), i);
+            } else if (hash_matches == false) {
+                PrintAndLogEx(SUCCESS, "EF_DG%i: " _RED_("Invalid"), i);
+            } else {
+                PrintAndLogEx(SUCCESS, "EF_DG%i: " _GREEN_("Valid"), i);
+            }
+        }
+    }
+
     return PM3_SUCCESS;
 }
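
The added EF_SOD section reduces to a four-way verdict per data group, which is exactly what the zero-initialised tables enable: both rows empty means the file was neither read nor listed (skip it); only the calc row empty means EF_SOD lists a hash but the file could not be read; only the EF_SOD row empty means the file was read but is not covered by EF_SOD; otherwise the two digests either match or they do not. A standalone restatement of that decision ladder (names are illustrative; only the logic mirrors the diff):

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    typedef enum { DG_SKIP, DG_UNREADABLE, DG_NOT_IN_SOD, DG_INVALID, DG_VALID } dg_status_t;

    // An all-zero row means "no hash recorded" on that side.
    static dg_status_t classify_dg(const uint8_t sod[64], const uint8_t calc[64], size_t hashlen) {
        const uint8_t zeroes[64] = { 0x00 };
        bool calc_empty = (memcmp(calc, zeroes, hashlen) == 0);
        bool sod_empty = (memcmp(sod, zeroes, hashlen) == 0);

        if (calc_empty && sod_empty)
            return DG_SKIP;         // neither read nor listed in EF_SOD
        if (calc_empty)
            return DG_UNREADABLE;   // listed in EF_SOD, but file read was denied
        if (sod_empty)
            return DG_NOT_IN_SOD;   // read fine, but EF_SOD carries no hash for it
        if (memcmp(sod, calc, hashlen) != 0)
            return DG_INVALID;      // digests disagree: integrity check fails
        return DG_VALID;
    }

Every comparison stops at hashlen bytes, so shorter digests (SHA-1's 20 bytes, say) never drag the rows' trailing zero padding into the verdict.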