Commit e639f9332b8864400465718ecb706a47c2aeee71

Authored by Brice Colombier
0 parents
Exists in master

Add incremental feature

Showing 6 changed files with 690 additions and 0 deletions

... ... @@ -0,0 +1,6 @@
  1 +*.npy
  2 +*.py~
  3 +*.pyc
  4 +*.m~
  5 +plots/
  6 +*/__init__.py
0 7 \ No newline at end of file
1st_order_CPA.py View file @ e639f93
... ... @@ -0,0 +1,214 @@
  1 +# coding: utf8
  2 +
  3 +import numpy as np
  4 +from scipy.stats.stats import pearsonr
  5 +import matplotlib.pyplot as plt
  6 +import matplotlib.ticker as ticker
  7 +import logging as log
  8 +import scipy.io as sio
  9 +from random import randint
  10 +
  11 +import argparse
  12 +import sys
  13 +sys.path.append('./correlation')
  14 +import corr as corr
  15 +
  16 +import os
  17 +
log.basicConfig(format="%(levelname)s: %(message)s", level=log.INFO)

# Hamming weight lookup table: HW_array[b] is the number of set bits in byte b.
# bin(b) yields e.g. '0b101'; the '0b' prefix contains no '1', so the count is exact.
HW_array = np.array([str(bin(byte)).count('1') for byte in range(256)], dtype='uint8')

# AES-128: 16 key/state bytes, 256 hypotheses per key byte.
nb_bytes = 16
nb_k_hyp = 256

# AES forward S-box (SubBytes), one row per high nibble.
Sbox_hex = [
    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16]

# Integer numpy view of the S-box, indexable by a whole array at once.
Sbox_dec = np.array([int(s) for s in Sbox_hex])

# AES inverse S-box (InvSubBytes), used when attacking the last round.
inv_Sbox_hex = [
    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
    0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
    0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,
    0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,
    0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,
    0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D]

inv_Sbox_dec = np.array([int(s) for s in inv_Sbox_hex])
  65 +
def compute_predictions(nb_traces, plaintexts_filename):
    """Compute CPA power predictions for the AES first-round S-box output.

    For every trace, byte position and key hypothesis, the prediction is
    HW(Sbox(plaintext_byte XOR key_hypothesis)).  The resulting
    (nb_traces, nb_bytes, nb_k_hyp) uint8 array is saved to 'predictions.npy'.

    nb_traces: number of traces to use, or -1 to use them all.
    plaintexts_filename: path to a .npy file of shape (nb_traces, nb_bytes).
    """
    k_hyps = np.arange(nb_k_hyp)  # all 256 key-byte hypotheses

    # Load once and only slice when a trace count was requested (the previous
    # version duplicated the np.load call and wrapped the filename in a
    # pointless single-argument os.path.join).
    plaintexts = np.load(plaintexts_filename)
    if nb_traces != -1:
        plaintexts = plaintexts[:nb_traces, :]
    # Hamming-weight model: XOR against an all-zero reference is a no-op,
    # kept for symmetry with the Hamming-distance variants of this tool.
    ref_value = np.zeros((np.shape(plaintexts)[0], np.shape(plaintexts)[1], nb_k_hyp), dtype='uint8')
    predictions = HW_array[np.bitwise_xor(Sbox_dec[np.bitwise_xor(plaintexts[:, :, np.newaxis], k_hyps)], ref_value)]
    np.save('predictions.npy', predictions)
    log.info("Predictions for intermediate value computed")
  77 +
def compute_correlation(nb_traces, traces_filename, step, predictions_filename='predictions.npy'):
    """Correlate measured traces against the saved predictions, byte by byte.

    nb_traces: number of traces to use, or -1 for all of them.
    traces_filename: .npy file of shape (nb_traces, nb_samples).
    step: if non-zero, compute the correlation incrementally every `step`
        traces (one slab per step via corr.fast_corr); if 0, compute a single
        correlation over all traces.
    predictions_filename: .npy produced by compute_predictions().

    Writes one './correlations/corr_byte_<i>.npy' file per key byte.
    """
    predictions = np.load(predictions_filename)
    log.info("Loaded predictions matrix of type {0} and size {1}".format(predictions.dtype, np.shape(predictions)))
    # Load once; slice only when a trace count was requested.
    traces = np.load(traces_filename)
    if nb_traces != -1:
        traces = traces[:nb_traces, :]
    log.info("Loaded traces ("+traces_filename+") matrix of type {0} and size {1}".format(traces.dtype, np.shape(traces)))
    nb_traces, nb_samples = np.shape(traces)
    # The per-byte results are saved below; create the target directory
    # instead of crashing on the first np.save().
    if not os.path.isdir('./correlations'):
        os.makedirs('./correlations')
    if step:
        correlation = np.zeros((nb_bytes, nb_traces//step, nb_samples, nb_k_hyp))
    else:
        correlation = np.zeros((nb_bytes, nb_samples, nb_k_hyp))
    for byte in range(nb_bytes):
        log.info("Computing correlation for byte {0}".format(byte))
        if step:
            correlation[byte,:,:,:] = corr.fast_corr(traces, predictions[:,byte,:], step)
            np.save('./correlations/corr_byte_'+str(byte)+'.npy', correlation[byte,:,:,:])
        else:
            correlation[byte,:,:] = corr.fast_corr(traces, predictions[:,byte,:])
            np.save('./correlations/corr_byte_'+str(byte)+'.npy', correlation[byte,:,:])
  99 +
def display_results(correct_key='0123456789abcdef123456789abcdef0'):
    """Print the best hypothesis per key byte and the rank of the true byte.

    correct_key: 32-hex-character AES-128 key used as ground truth.
    Reads './correlations/corr_byte_<i>.npy' for each of the 16 bytes.
    """
    byte_pairs = [correct_key[i:i+2] for i in range(0, len(correct_key), 2)]
    guessed_key = ""
    avg_position = 0
    for byte, correct_byte in enumerate(byte_pairs):
        scores = np.load('./correlations/corr_byte_'+str(byte)+'.npy')
        if len(np.shape(scores)) == 3:
            # Incremental correlation was computed: keep only the final slab.
            scores = scores[-1,:,:]
        magnitude = abs(scores)
        # Best |correlation| over samples for each hypothesis, and over
        # hypotheses for each sample.
        max_corr_per_key_byte = magnitude.max(axis=0)
        max_corr_samples = magnitude.max(axis=1)

        most_probable_hex_key_byte = hex(np.argmax(max_corr_per_key_byte))[2:].zfill(2)
        sample_of_interest = np.argmax(max_corr_samples)
        rho = round(max(max_corr_per_key_byte), 3)

        log.info("=> Most probable key byte #{0} : \"{1}\", at t={2}".format(str(byte).zfill(2), most_probable_hex_key_byte, sample_of_interest))
        ranking = list(np.sort(max_corr_per_key_byte)[::-1])
        position_correct_byte = ranking.index(max_corr_per_key_byte[int(correct_byte, 16)])
        log.info("=> Correct one is \"{0}\", ranked {1}/{2} with ρ={3}".format(correct_byte, position_correct_byte, nb_k_hyp, rho))
        avg_position += position_correct_byte
        guessed_key += most_probable_hex_key_byte
    print("=> Guessed key is \"{0}\", average rank is {1}".format(guessed_key, avg_position/nb_bytes))
  122 +
def plot_results(target_bytes,
                 step,
                 correlations_path = './correlations'):
    """Plot correlation results for the given key bytes into ./plots.

    target_bytes: iterable of key-byte indices (0..15).
    step: the incremental step used by compute_correlation(); non-zero means
        the saved correlations are 3-D (steps, samples, hypotheses).
    correlations_path: directory holding corr_byte_<i>.npy files.
    """
    plot_path = "./plots"
    if step:
        # Incremental mode: evolution plots over the number of traces used.
        for byte in target_bytes:
            log.info("Plotting for byte {0}".format(byte))
            corr = abs(np.load(os.path.join(correlations_path, 'corr_byte_'+str(byte)+'.npy')))
            nb_steps, nb_samples, nb_hyp = np.shape(corr)
            # Best |correlation| over samples, per step and per hypothesis.
            max_corr_per_key_byte = corr.max(axis=1)
            # Winning hypothesis at the final (largest-trace-count) step.
            key_byte = np.argmax(max_corr_per_key_byte[-1,:])
            hex_key_byte = hex(key_byte)[2:].zfill(2)
            plt.figure()
            # Winning hypothesis in black, all others in faint grey.
            for k_hyp in list(range(nb_hyp)):
                if k_hyp == key_byte:
                    plt.plot([step*i for i in range(nb_steps)], max_corr_per_key_byte[:,k_hyp], color='black')
                else:
                    plt.plot([step*i for i in range(nb_steps)], max_corr_per_key_byte[:,k_hyp], color='grey', alpha=0.25)
            plt.xlim(0, step*(nb_steps-1))
            plt.ylim(0, 1)
            plt.xlabel("#Traces")
            plt.ylabel("Maximum of correlation per key hypothesis")
            plt.savefig(os.path.join(plot_path, 'max_corr_per_k_hyp_byte_'+str(byte)+'.png'))
            # plt.show()
            plt.close()
            plt.figure()
            # Rank of the winning hypothesis at every step (0 = best).
            key_ranks = []
            for i in range(nb_steps):
                try:
                    # np.where on a list == array comparison; fails (empty /
                    # ambiguous match) when early slabs contain NaN values.
                    key_rank = np.where(sorted(max_corr_per_key_byte[i,:])[::-1] == max_corr_per_key_byte[i,key_byte])[0][-1]
                except:
                    raise ValueError("Could not compute the key rank, increase the step size !")
                key_ranks.append(key_rank)
            plt.plot([step*i for i in range(nb_steps)], key_ranks)
            plt.xlim(0, step*(nb_steps-1))
            plt.ylim(0, 255)
            plt.xlabel("#Traces")
            plt.ylabel("Key rank")
            plt.savefig(os.path.join(plot_path, 'key_rank_byte_'+str(byte)+'.png'))
            # plt.show()
            plt.close()
        return
    # Non-incremental mode: correlation vs samples and vs hypotheses.
    for byte in target_bytes:
        log.info("Plotting for byte {0}".format(byte))
        if step:
            # NOTE(review): unreachable — the incremental branch above
            # returns before this loop; kept as in the original.
            corr = abs(np.load(os.path.join(correlations_path, 'corr_byte_'+str(byte)+'.npy')))[-1,:,:]
        else:
            corr = abs(np.load(os.path.join(correlations_path, 'corr_byte_'+str(byte)+'.npy')))
        nb_samples, nb_hyp = np.shape(corr)
        max_corr_per_key_byte = corr.max(axis=0)
        max_corr_samples = corr.max(axis=1)
        key_byte = np.argmax(max_corr_per_key_byte)
        hex_key_byte = hex(key_byte)[2:].zfill(2)
        sample_of_interest = np.argmax(max_corr_samples)
        plt.figure()
        # Grey background: all hypotheses except the winner; blue: winner.
        # NOTE(review): corr[:,-key_byte:] selects ALL columns when
        # key_byte == 0 — presumably meant corr[:,key_byte+1:]; confirm.
        plt.plot(corr[:,:key_byte], color = 'grey', linewidth =1)
        plt.plot(corr[:,-key_byte:], color = 'grey', linewidth =1)
        plt.plot(corr[:,key_byte], color = 'blue', linewidth =1)
        plt.xlim(0, nb_samples)
        plt.ylim(0, 1)
        plt.xlabel("Samples")
        plt.ylabel("Correlation")
        plt.savefig(os.path.join(plot_path, 'corr_vs_samples_byte_'+str(byte)+'.png'))
        plt.close()
        # plt.show()
        plt.figure()
        # Transposed view: one curve per sample across hypotheses, with the
        # winning hypothesis column highlighted in red at x == key_byte.
        plt.plot(corr.T[:,:key_byte], color = 'grey', linewidth =1)
        plt.plot(corr.T[:,-key_byte:], color = 'grey', linewidth =1)
        plt.plot(nb_samples*[key_byte], corr[:,key_byte], color = 'red', linewidth =1)
        plt.xlim(-1,nb_hyp-1)
        plt.ylim(0, 1)
        plt.xlabel("Key hypotheses")
        plt.ylabel("Correlation")
        # plt.show()
        plt.savefig(os.path.join(plot_path, 'corr_vs_k_hyp_byte_'+str(byte)+'.png'))
        plt.close()
  199 +
if __name__ == "__main__":

    # Command-line entry point: compute predictions, correlate them with the
    # traces, then report and plot the recovered key.
    parser = argparse.ArgumentParser(description='Preprocess traces')
    # -n: number of traces to use (-1 = use every trace in the file).
    parser.add_argument("-n", "--nb_traces", type=int, nargs='?', default=-1)
    parser.add_argument("-p", "--plaintexts_filename", type=str, default='plaintexts.npy')
    parser.add_argument("-t", "--traces_filename", type=str, default='traces.npy')
    # -i: non-zero enables incremental correlation every <step> traces.
    parser.add_argument("-i", "--incremental_step", type=int, nargs='?', default=0)
    # -k: ground-truth key (32 hex chars) used for ranking the results.
    parser.add_argument("-k", "--correct_key", type=str, nargs='?', default="0123456789abcdef123456789abcdef0")

    args = parser.parse_args()

    compute_predictions(args.nb_traces, args.plaintexts_filename)
    compute_correlation(args.nb_traces, args.traces_filename, args.incremental_step)
    display_results(args.correct_key)
    plot_results(range(16), args.incremental_step)
2nd_order_CPA.py View file @ e639f93
... ... @@ -0,0 +1,167 @@
  1 +import numpy as np
  2 +from scipy.stats.stats import pearsonr
  3 +import matplotlib.pyplot as plt
  4 +import logging as log
  5 +# import matlab.engine
  6 +import scipy.io as sio
  7 +import time
  8 +import os
  9 +
  10 +from multiprocessing import Pool, current_process
  11 +from functools import partial
  12 +
  13 +import sys
  14 +sys.path.append('./../Correlation')
  15 +import corr as corr
  16 +
log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)

# 2nd-order attack works on 16-bit "doublets": 8 doublets of 2 bytes each,
# hence 2^16 key hypotheses per doublet.
nb_doublets = 8
nb_k_hyp = 65536

# Hamming weight lookup table: HW[b] is the number of set bits in byte b.
HW = np.array([bin(byte).count('1') for byte in range(256)]).astype(np.uint8)

# AES forward S-box (SubBytes).
Sbox_hex = [0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76, 0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0, 0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15, 0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75, 0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84, 0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF, 0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8, 0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2, 0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73, 0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB, 0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79, 0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08, 0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A, 0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E, 0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF, 0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16]

# Precompute the 16-bit SBox (two 8-bit S-boxes side-by-side):
# Sbox_dec[256*a + b] == 256*Sbox(a) + Sbox(b).
Sbox_dec_byte = np.array([int(s) for s in Sbox_hex], dtype = np.uint8)
Sbox_dec = np.array([int(s_high)*256+int(s_low) for s_high in Sbox_hex for s_low in Sbox_hex], dtype = np.uint16)
  29 +
def compute_prediction(plaintexts_path = './plaintexts',
                       predictions_path = './predictions',
                       save_mat = False):
    """Compute 2nd-order CPA predictions for every 16-bit doublet.

    Loads the first .npy file found in `plaintexts_path`, merges adjacent
    plaintext bytes into 16-bit doublets, and for each of the 8 doublets
    saves HW(Sbox(p_high ^ k_high) XOR Sbox(p_low ^ k_low)) over all 65536
    key hypotheses into `predictions_path`.

    save_mat: additionally save each prediction matrix as a float32 .mat
        file for the MATLAB correlation script.
    """
    t0 = time.clock()
    # Use the first file found in the directory (arbitrary listdir order).
    for plaintexts_filename in os.listdir(plaintexts_path):
        plaintexts = np.load(os.path.join(plaintexts_path, plaintexts_filename)).astype(np.uint8)
        break
    # Merge plaintext bytes into doublets (16 bits)
    plaintexts = 256*plaintexts[:,::2]+plaintexts[:,1::2]
    # Compute the possible key hypotheses
    k_hyps = np.array(range(nb_k_hyp), dtype=np.uint16) #0 to 65535
    # Apply the leakage model
    for doublet in range(nb_doublets):
        log.info("Computing prediction for doublet {0}".format(doublet))
        prediction = Sbox_dec[np.bitwise_xor(plaintexts[:, doublet, np.newaxis], k_hyps)]
        # '//' keeps the high/low byte split integer-valued on Python 2 AND 3
        # ('/' is float true division on Python 3 and breaks np.bitwise_xor).
        prediction = HW[np.bitwise_xor(prediction//256, prediction%256).astype(np.uint8)]
        np.save(os.path.join(predictions_path, 'prediction_doublet_'+str(doublet)+'.npy'), prediction)
        if save_mat:
            sio.savemat(os.path.join(predictions_path, 'prediction_doublet_'+str(doublet)+'.mat'), {'prediction':prediction.astype(np.float32)})
    log.info("Predictions computed in {0} s.".format(time.clock()-t0))
  51 +
  52 +
def compute_prediction_one_doublet(doublet,
                                   plaintexts_path = './plaintexts',
                                   predictions_path = './predictions',
                                   save_mat = False):
    """Compute and save the prediction matrix for one 16-bit doublet.

    Worker used by compute_prediction_parallel(); mirrors a single iteration
    of compute_prediction().

    doublet: doublet index (0..7) selecting the plaintext column.
    """
    # Fix: the previous version rebound plaintexts_path/predictions_path to
    # hard-coded values here, silently ignoring the caller's arguments.
    plaintexts = np.load(os.path.join(plaintexts_path, 'masked_plaintexts.npy')).astype(np.uint8)
    # Merge plaintext bytes into doublets (16 bits)
    plaintexts = 256*plaintexts[:,::2]+plaintexts[:,1::2]
    # Compute the possible key hypotheses
    k_hyps = np.array(range(nb_k_hyp), dtype=np.uint16) #0 to 65535
    # Apply the leakage model
    log.info("Computing prediction for doublet {0}".format(doublet))
    prediction = Sbox_dec[np.bitwise_xor(plaintexts[:, doublet, np.newaxis], k_hyps)]
    # '//' keeps the byte split integral on Python 2 and 3 alike.
    prediction = HW[np.bitwise_xor(prediction//256, prediction%256).astype(np.uint8)]
    np.save(os.path.join(predictions_path, 'prediction_doublet_'+str(doublet)+'.npy'), prediction)
    if save_mat:
        sio.savemat(os.path.join(predictions_path, 'prediction_doublet_'+str(doublet)+'.mat'), {'prediction':prediction.astype(np.float32)})
  71 +
def compute_prediction_parallel(ncores = 1,
                                plaintexts_path = './plaintexts',
                                predictions_path = './predictions',
                                save_mat = False):
    """Compute all doublet predictions in parallel across worker processes.

    ncores: number of worker processes to spawn.
    """
    # Fix: t0 was never defined in this function, so the final log.info
    # raised NameError unless a module-level t0 happened to exist.
    t0 = time.time()
    pool = Pool(ncores)
    doublets = list(range(nb_doublets))
    pool.map(partial(compute_prediction_one_doublet, plaintexts_path = plaintexts_path, predictions_path = predictions_path, save_mat=save_mat), doublets)
    # Release the worker processes deterministically.
    pool.close()
    pool.join()
    log.info("Predictions computed in {0} s.".format(time.time()-t0))
  80 +
def compute_correlation(predictions_path = './predictions',
                        traces_path = './traces',
                        correlations_path = './correlations',
                        save_mat = False):
    """Correlate processed traces against the per-doublet predictions.

    Currently disabled on purpose: the unconditional raise below documents
    that the 65536-hypothesis correlation does not fit in memory with this
    numpy path; correlation/compute_corr.m (MATLAB) is used instead.

    save_mat: export the loaded traces as float32 .mat for that script.
    """
    traces = np.load(os.path.join(traces_path, 'processed_masked_traces.npy'))
    if save_mat:
        sio.savemat(os.path.join(traces_path, 'processed_masked_traces.mat'), {'traces':traces.astype(np.float32)})
    log.info("Loaded {0} traces of {1} samples".format(np.shape(traces)[0], np.shape(traces)[1]))
    # Deliberate guard: everything below is unreachable and kept only as a
    # reference implementation of the numpy path.
    if True:
        raise MemoryError('Cannot compute correlation with numpy, use the MATLAB script')
    # NOTE(review): starts at doublet 1, skipping doublet 0 — presumably a
    # leftover from a partial rerun; confirm before re-enabling this path.
    for doublet in range(1, nb_doublets):
        prediction = np.load(os.path.join(predictions_path, 'prediction_doublet_'+str(doublet)+'.npy'))
        log.info("Computing correlation for doublet {0}".format(doublet))
        correlation = corr.corr(traces, prediction)
        np.save(os.path.join(correlations_path, 'correlation_doublet_'+str(doublet)+'.npy'), correlation)
        if save_mat:
            sio.savemat(os.path.join(correlations_path, 'correlation_doublet_'+str(doublet)+'.mat'), {'correlation':correlation.astype(np.float32)})
  98 +
def display_results(correct_key,
                    correlations_path = './correlations',
                    correlation_format = 'mat'):
    """Print the best key-doublet guess and the rank of the correct doublet.

    Python 2 only (print statements below).
    correct_key: 32-hex-char key, split into 16-bit doublets (4 hex chars).
    correlation_format: 'npy' (numpy path) or 'mat' (MATLAB output).
    """
    # Split key in doublets for display
    correct_key = [correct_key[i:i+4] for i in range(0, len(correct_key), 4)]
    for doublet, correct_doublet in enumerate(correct_key):
        if correlation_format == 'npy':
            corr = np.load('correlations/correlation_doublet_'+str(doublet)+'.npy')
        elif correlation_format == 'mat':
            corr = sio.loadmat('correlations/correlation_doublet_'+str(doublet)+'.mat')['correlation']
        # Best |correlation| over sample combinations per hypothesis, and
        # over hypotheses per sample combination.
        max_correlation_per_key_doublet = abs(corr).max(axis=0)
        max_correlation_indexes = abs(corr).max(axis=1)
        # NOTE(review): [2:-1] assumes hex() ends with a Python 2 long 'L'
        # suffix (e.g. '0x1f2aL'); for a plain int it would drop the last
        # hex digit — verify the type returned by np.argmax here.
        hex_key_doublet = hex(np.argmax(max_correlation_per_key_doublet))[2:-1].zfill(4)
        index_of_interest = np.argmax(max_correlation_indexes)
        print "###\nKey doublet #{0} : \"{1}\", found at index {2}".format(str(doublet).zfill(1), hex_key_doublet.zfill(4), index_of_interest)
        position_correct_doublet = list(np.sort(max_correlation_per_key_doublet)[::-1]).index(max_correlation_per_key_doublet[int(correct_doublet, 16)])
        print " Correct one is \"{0}\", ranked {1}/{2}".format(correct_doublet, position_correct_doublet, nb_k_hyp)
  116 +
def plot_results(correct_key,
                 correlations_path = './correlations',
                 plots_path = './plots',
                 correlation_format = 'mat',
                 save = False):
    """Plot correlation curves for every key doublet.

    correct_key: ground-truth key, split into 4-hex-char doublets.
    correlation_format: 'npy' or 'mat' correlation files to load.
    save: write PNGs under `plots_path` instead of showing interactively.
    """
    correct_key = [correct_key[i:i+4] for i in range(0, len(correct_key), 4)]
    for doublet, correct_doublet in enumerate(correct_key):
        if correlation_format == 'npy':
            corr = np.load('correlations/correlation_doublet_'+str(doublet)+'.npy')
        elif correlation_format == 'mat':
            corr = sio.loadmat('correlations/correlation_doublet_'+str(doublet)+'.mat')['correlation']
        max_correlation_per_key_doublet = abs(corr).max(axis=0)
        max_correlation_indexes = abs(corr).max(axis=1)
        # hex()[2:-1]: strips '0x' and the Python 2 long 'L' suffix.
        hex_key_doublet = hex(np.argmax(max_correlation_per_key_doublet))[2:-1].zfill(4)
        index_of_interest = np.argmax(max_correlation_indexes)
        # One curve per sample combination; highlight the best combination.
        for i in range(np.shape(corr)[0]):
            plt.title('Correlation values for all key hypotheses')
            plt.xlabel('Key hypotheses')
            plt.ylabel('Correlation')
            if i == index_of_interest:
                plt.plot(corr[i,:], color='red', alpha=0.5)
            else:
                plt.plot(corr[i,:], color='#000000', alpha=0.25)
        if save:
            plt.savefig(os.path.join(plots_path, 'k_hyp', 'corr_VS_k_hyp_doublet_'+str(doublet)+'.png'))
        else:
            plt.show()
        plt.close()
        # One curve per key hypothesis; highlight the guessed doublet.
        for i in range(np.shape(corr)[1]):
            plt.title('Correlation values for all sample combinations')
            plt.xlabel('Sample combinations')
            plt.ylabel('Correlation')
            if i == int(hex_key_doublet, 16):
                plt.plot(corr[:,i], color='red', alpha=0.5)
            else:
                plt.plot(corr[:,i], color='#000000', alpha=0.25)
        if save:
            plt.savefig(os.path.join(plots_path, 'combinations', 'corr_VS_combinations_doublet_'+str(doublet)+'.png'))
        else:
            plt.show()
        plt.close()
  158 +
if __name__ == "__main__":
    # Script entry point (Python 2): compute predictions, then attempt the
    # (currently disabled) numpy correlation path.
    t0 = time.time()
    compute_prediction(save_mat = True)
    # compute_prediction_parallel(ncores = 2, save_mat = True)
    print time.time() - t0  # elapsed wall-clock seconds (Python 2 print)
    compute_correlation(save_mat = True)
    # display_results(correct_key = '0123456789abcdef123456789abcdef0')
    # plot_results(correct_key = '0123456789ab', save = True)
    # plot_results(correct_key = '0123456789abcdef123456789abcdef0', save = True)
correlation/compute_corr.m View file @ e639f93
... ... @@ -0,0 +1,22 @@
% Compute, for each 16-bit doublet, the correlation between the processed
% masked traces and the precomputed predictions (both exported from Python
% as .mat files), save it, and append the best hypothesis/sample to
% corr_results.csv.
path = '../2nd_order/'
load(strcat(path, 'traces/processed_masked_traces.mat'));

% Write the CSV header row.
head = {'doublet' 'correlation' 'k_hyp' 'sample'};
fid = fopen('corr_results.csv', 'w');
fprintf(fid, '%s,', head{1,1:end-1});
fprintf(fid, '%s\n', head{1,end});
fclose(fid);

for doublet=0:7
    doublet  % no semicolon: echoes progress to the console
    load(sprintf(strcat(path, 'predictions/prediction_doublet_%s.mat'), int2str(doublet)));
    correlation = corr(traces, prediction);
    correlation = abs(correlation);
    save(sprintf(strcat(path, 'correlations/correlation_doublet_%s.mat'), int2str(doublet)), 'correlation');
    % Column-wise max over samples per hypothesis, and (via transpose)
    % over hypotheses per sample.
    max_k = max(correlation);
    max_s = max(correlation');
    [val_corr_at_k, index_k] = max(max_k)
    [val_corr_at_s, index_s] = max(max_s)
    % index_k-1 converts MATLAB's 1-based index to the 0-based hypothesis.
    dlmwrite('corr_results.csv', [doublet val_corr_at_k index_k-1 index_s], '-append');
end
exit
0 23 \ No newline at end of file
correlation/corr.py View file @ e639f93
... ... @@ -0,0 +1,70 @@
  1 +# Inputs :
  2 +# - T of size n x t
  3 +# - H of size n x h
  4 +# Output :
  5 +# - matrix of size t x h
  6 +
import numpy as np
# Promote every numpy floating-point warning (divide-by-zero, invalid
# value, ...) to a raised FloatingPointError, so zero correlation
# denominators fail loudly instead of silently producing inf/nan.
np.seterr(all='raise')

import time
  11 +
def corr(A,B):
    """Columnwise Pearson correlation between two sample matrices.

    A: (n, t) array and B: (n, h) array, with n observations per column.
    Returns a (t, h) array whose (i, j) entry is the Pearson correlation
    coefficient between column i of A and column j of B.
    """
    # Columnwise mean of input arrays & subtract from input arrays themselves
    A_mA = A - A.mean(0)[None,:]
    B_mB = B - B.mean(0)[None,:]

    # Sum of squares across columns
    ssA = (A_mA**2).sum(0)
    ssB = (B_mB**2).sum(0)

    # Cross products over the normalising denominator.  (The previous
    # version computed these two intermediates, discarded them, and then
    # recomputed both dot products inside the return statement.)
    prod_mean = np.dot(A_mA.T, B_mB)
    sqr = np.sqrt(np.dot(ssA[:,None], ssB[None]))
    return prod_mean/sqr
  25 +
def fast_corr(T,H,step=0):
    """Pearson correlation of every trace column against every hypothesis column.

    T: (n, t) traces, H: (n, h) predictions.
    step == 0: return the (t, h) correlation matrix over all n rows.
    step  > 0: return an (n//step, t, h) array whose slab i is the correlation
        over the first (i+1)*step rows, obtained with a pairwise (Chan et al.)
        merge of cross/auto co-moments so slabs are updated incrementally.

    Raises ValueError on a zero denominator (a column constant over the rows
    seen so far, detectable when np.seterr(all='raise') is active, as set at
    module import) -- increase `step` so early slabs hold more rows.
    """
    n, t = np.shape(T)
    n, h = np.shape(H)
    if not step:
        # One-shot computation: centered cross product over the product of
        # per-column root sums of squares.
        T_centered = T - np.mean(T, axis=0)
        H_centered = H - np.mean(H, axis=0)
        ACS = np.matmul(T_centered.T, H_centered)
        CS_t = np.sum(np.square(T_centered), axis=0)
        CS_h = np.sum(np.square(H_centered), axis=0)
        C = ACS/np.sqrt(np.outer(CS_t, CS_h))
        return C
    else:
        C = np.zeros((n//step, t, h))
        for i in range(n//step):
            # Current slab of `step` fresh rows.
            T_current = T[i*step:(i+1)*step,:]
            H_current = H[i*step:(i+1)*step,:]

            T_current_centered = T_current - np.mean(T_current, axis=0)
            H_current_centered = H_current - np.mean(H_current, axis=0)

            ACS_current = np.matmul(T_current_centered.T, H_current_centered)
            CS_t_current = np.sum(np.square(T_current_centered), axis=0)
            CS_h_current = np.sum(np.square(H_current_centered), axis=0)

            if i == 0:
                # (The previous version also dead-stored T_updated/H_updated
                # copies here that were never read.)
                ACS_updated = ACS_current
                CS_t_updated = CS_t_current
                CS_h_updated = CS_h_current
            else:
                # Difference between the mean over the i*step rows merged so
                # far and the mean of the current slab.
                delta_t = np.mean(T[:i*step,:], axis=0) - np.mean(T_current, axis=0)
                delta_h = np.mean(H[:i*step,:], axis=0) - np.mean(H_current, axis=0)
                # Pairwise co-moment merge: C_AB = C_A + C_B +
                # nA*nB/(nA+nB) * outer(delta), with nA = i*step and
                # nB = step, hence the factor step*i/(i+1).
                ACS_updated += ACS_current + step*i*np.outer(delta_t, delta_h)/(i+1)
                CS_t_updated += CS_t_current + step*i*np.square(delta_t)/(i+1)
                CS_h_updated += CS_h_current + step*i*np.square(delta_h)/(i+1)

            try:
                C[i,:,:] = ACS_updated/np.sqrt(np.outer(CS_t_updated, CS_h_updated))
            except FloatingPointError:
                # Narrowed from a bare `except:` so programming errors
                # (shape mismatches, KeyboardInterrupt, ...) propagate.
                raise ValueError("Zero denominator found when computing the correlation, consider increasing the step")

        return C
shifted_1st_order_CPA.py View file @ e639f93
... ... @@ -0,0 +1,211 @@
  1 +# coding: utf8
  2 +
  3 +import numpy as np
  4 +from scipy.stats.stats import pearsonr
  5 +import matplotlib.pyplot as plt
  6 +import matplotlib.ticker as ticker
  7 +import logging as log
  8 +import scipy.io as sio
  9 +from random import randint
  10 +
  11 +import argparse
  12 +import sys
  13 +sys.path.append('./../Correlation')
  14 +import corr as corr
  15 +
  16 +import os
  17 +
log.basicConfig(format="%(levelname)s: %(message)s", level=log.INFO)

# Hamming weight array: HW_array[b] is the number of set bits in byte b
# (bin(byte)[2:] strips the '0b' prefix before counting).
HW_array = np.array([str(bin(byte)[2:]).count('1') for byte in range(256)], dtype=np.uint8)

# AES-128: 16 key/state bytes, 256 hypotheses per key byte.
nb_bytes = 16
nb_k_hyp = 256

# AES forward S-box (SubBytes).
Sbox_hex = [
    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16]

Sbox_dec = np.array([int(s) for s in Sbox_hex])

# AES inverse S-box (InvSubBytes), used when attacking the last round.
inv_Sbox_hex = [
    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
    0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
    0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,
    0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,
    0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,
    0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D]

inv_Sbox_dec = np.array([int(s) for s in inv_Sbox_hex])
  65 +
def compute_predictions(leakage_model,
                        attacked_round,
                        nb_traces,
                        plaintexts_path = './plaintexts',
                        ciphertexts_path = './ciphertexts',
                        predictions_path = './predictions',
                        save_mat = False):
    """Compute CPA predictions with a random circular shift of the inputs.

    leakage_model: 'hamming_weight', 'hamming_distance_plaintext' or
        'hamming_distance_sbox_input' (the latter two: first round only).
    attacked_round: 'first' (S-box output from plaintexts) or 'last'
        (inverse S-box from ciphertexts).
    nb_traces: number of traces to keep after the shift.

    Returns the random shift applied to the plaintext rows; the caller must
    pass the same shift to compute_correlation() so the traces are rotated
    identically and stay row-aligned.
    """
    k_hyps = np.array(range(nb_k_hyp)) #0 to 255
    # Random circular rotation of the input rows; selects which window of
    # nb_traces rows is used.
    shift = randint(0, nb_traces)
    if attacked_round.lower() == 'first':
        # Use the first file found in the plaintexts directory.
        for plaintexts_filename in os.listdir(plaintexts_path):
            plaintexts = np.load(os.path.join(plaintexts_path, plaintexts_filename))
            plaintexts = np.vstack((plaintexts[shift:], plaintexts[:shift]))
            plaintexts = plaintexts[:nb_traces,:]
            break
        # Compute the reference value against which the Hamming distance is computed
        if leakage_model.lower() =='hamming_weight':
            ref_value = np.zeros((np.shape(plaintexts)[0], np.shape(plaintexts)[1], nb_k_hyp)).astype(np.uint8)
        elif leakage_model.lower() == 'hamming_distance_plaintext':
            ref_value = plaintexts[:, :, np.newaxis]
        elif leakage_model.lower() == 'hamming_distance_sbox_input':
            ref_value = np.bitwise_xor(plaintexts[:, :, np.newaxis], k_hyps)
        predictions = HW_array[np.bitwise_xor(Sbox_dec[np.bitwise_xor(plaintexts[:, :, np.newaxis], k_hyps)], ref_value)]
    elif attacked_round.lower() == 'last':
        # Last round: ciphertexts are NOT shifted here, unlike plaintexts
        # in the first-round branch above — verify this asymmetry is wanted.
        for ciphertexts_filename in os.listdir(ciphertexts_path):
            ciphertexts = np.load(os.path.join(ciphertexts_path, ciphertexts_filename))
            break
        if leakage_model.lower() =='hamming_weight':
            ref_value = np.zeros((np.shape(ciphertexts)[0], np.shape(ciphertexts)[1], nb_k_hyp)).astype(np.uint8)
        # NOTE(review): ref_value already carries nb_k_hyp as its third axis,
        # so this extra np.newaxis makes it 4-D and broadcasting yields 4-D
        # predictions — looks unintended; compare with the first-round path.
        ref_value = ref_value[:, :, np.newaxis]
        predictions = HW_array[np.bitwise_xor(inv_Sbox_dec[np.bitwise_xor(ciphertexts[:, :, np.newaxis], k_hyps)], ref_value)]
    # NOTE(review): `predictions` is only bound inside the branches above;
    # an unrecognised attacked_round (or leakage_model in the 'last' branch)
    # raises NameError/UnboundLocalError here.
    np.save(os.path.join(predictions_path, 'prediction.npy'), predictions)
    if save_mat:
        sio.savemat(os.path.join(predictions_path, 'prediction.mat'), {'prediction':predictions.astype(np.float32)})
    log.info("Predictions for intermediate value in {0} round computed".format(attacked_round))
    return shift
  102 +
def compute_correlation(nb_traces,
                        shift=0,
                        predictions_path = './predictions',
                        traces_path = './traces',
                        correlations_path = './correlations'):
    """Correlate the measured traces with the leakage predictions.

    Loads the first file found in ``predictions_path`` and ``traces_path``,
    rotates the traces by ``shift`` (the same circular shift used when the
    predictions were computed), keeps the first ``nb_traces`` of them, and
    writes one (nb_samples, nb_k_hyp) correlation matrix per key byte to
    ``correlations_path`` as corr_byte_<i>.npy.
    """
    # Only the first file of each directory is considered.
    predictions_filename = os.listdir(predictions_path)[0]
    predictions = np.load(os.path.join(predictions_path, predictions_filename))
    log.info("Loaded predictions matrix of type {0} and size {1}".format(predictions.dtype, np.shape(predictions)))
    traces_filename = os.listdir(traces_path)[0]
    traces = np.load(os.path.join(traces_path, traces_filename))
    # Rotate then truncate, mirroring what compute_predictions() did to the inputs.
    traces = np.vstack((traces[shift:], traces[:shift]))[:nb_traces, :]
    log.info("Loaded traces ("+traces_filename+") matrix of type {0} and size {1}".format(traces.dtype, np.shape(traces)))
    nb_samples = np.shape(traces)[1]
    # Keep the float64 staging array so the saved dtype matches previous runs.
    correlation = np.zeros((nb_bytes, nb_samples, nb_k_hyp))
    for byte_index in range(nb_bytes):
        log.info("Computing correlation for byte {0}".format(byte_index))
        correlation[byte_index, :, :] = corr.corr(traces, predictions[:, byte_index, :])
        np.save(os.path.join(correlations_path, 'corr_byte_'+str(byte_index)+'.npy'), correlation[byte_index, :, :])
  124 +
  125 +def display_results(correct_key,
  126 + attacked_round = 'first',
  127 + correlations_path = './correlations'):
  128 + correct_key = [correct_key[i:i+2] for i in range(0, len(correct_key), 2)]
  129 + guessed_key = ""
  130 + for byte, correct_byte in enumerate(correct_key):
  131 + corr = np.load(os.path.join(correlations_path, 'corr_byte_'+str(byte)+'.npy'))
  132 + max_corr_per_key_byte = abs(corr).max(axis=0)
  133 + max_corr_samples = abs(corr).max(axis=1)
  134 + hex_key_byte = hex(np.argmax(max_corr_per_key_byte))[2:-1].zfill(2)
  135 + sample_of_interest = np.argmax(max_corr_samples)
  136 + corr = round(max(max_corr_per_key_byte), 3)
  137 + if attacked_round.lower() == 'first':
  138 + log.info("=> Guessed key byte #{0} : \"{1}\", found at sample {2}".format(str(byte).zfill(2), hex_key_byte, sample_of_interest))
  139 + position_correct_byte = list(np.sort(max_corr_per_key_byte)[::-1]).index(max_corr_per_key_byte[int(correct_byte, 16)])
  140 + log.info(" => Correct one is \"{0}\", ranked {1}/{2} with correlation={3}".format(correct_byte, position_correct_byte, nb_k_hyp, corr))
  141 + guessed_key+=hex_key_byte
  142 + elif attacked_round.lower() == 'last':
  143 + log.info("=> Guessed last round key byte #{0} : \"{1}\", found at sample {2}".format(str(byte).zfill(2), hex_key_byte, sample_of_interest))
  144 + position_correct_byte = list(np.sort(max_corr_per_key_byte)[::-1]).index(max_corr_per_key_byte[int(correct_byte, 16)])
  145 + log.info(" => Correct one is \"{0}\", ranked {1}/{2} with correlation={3}".format(correct_byte, position_correct_byte, nb_k_hyp, corr))
  146 + guessed_key+=hex_key_byte
  147 + if attacked_round.lower() =='first':
  148 + print "=> Guessed key is \"{0}\"".format(guessed_key)
  149 + elif attacked_round.lower() =='last':
  150 + print "=> Guessed last round key is \"{0}\"".format(guessed_key)
  151 +
def plot_results(target_bytes,
                 correlations_path = './correlations',
                 plot_path="./plots"):
    """Plot the correlation matrices for the given key bytes.

    For every byte index in ``target_bytes``, two figures are drawn:
    correlation vs. time sample (the best key hypothesis in blue, all others
    in grey) and correlation vs. key hypothesis (best hypothesis in red).

    Parameters
    ----------
    target_bytes : iterable of int
        Indices of the key bytes to plot (corr_byte_<i>.npy must exist).
    correlations_path : str
        Directory containing the saved correlation matrices.
    plot_path : str
        Destination directory for the (currently disabled) savefig calls.
    """
    for byte in target_bytes:
        log.info("Plotting for byte {0}".format(byte))
        # corr is already the absolute value, so no further abs() is needed.
        corr = abs(np.load(os.path.join(correlations_path, 'corr_byte_'+str(byte)+'.npy')))
        nb_samples, nb_hyp = np.shape(corr)
        key_byte = np.argmax(corr.max(axis=0))
        # Figure 1: correlation versus time sample.
        plt.figure()
        plt.plot(corr[:,:key_byte], color = 'grey', linewidth = 0, marker="o", markersize=4)
        # BUG FIX: the original plotted corr[:,-key_byte:] (the LAST key_byte
        # columns), which omits or re-plots hypotheses depending on key_byte;
        # the columns after the best hypothesis are corr[:, key_byte+1:].
        plt.plot(corr[:,key_byte+1:], color = 'grey', linewidth = 0, marker="o", markersize=4)
        # Best hypothesis drawn last so it sits on top of the grey cloud.
        plt.plot(corr[:,key_byte], color = 'blue', linewidth = 0, marker="o", markersize=4)
        plt.xlim(0,nb_samples)
        plt.ylim(0, 1)
        plt.xlabel("Echantillons (temps)")
        plt.ylabel("Correlation")
        # plt.savefig(os.path.join(plot_path, 'corr_vs_samples_byte_'+str(byte)+'.png'))
        # plt.show()
        # Figure 2: correlation versus key hypothesis.
        plt.figure()
        plt.plot(corr.transpose(), color = 'grey', linewidth = 0, marker="o", markersize=4)
        plt.plot(nb_samples*[key_byte], corr[:,key_byte], color = 'red', linewidth = 0, marker="o", markersize=4)
        plt.xlim(-1,nb_hyp-1)
        axes = plt.gca()
        # Ticks every 16 hypotheses, displayed in hexadecimal.
        axes.get_xaxis().set_major_locator(ticker.MultipleLocator(16))
        axes.get_xaxis().set_major_formatter(ticker.FormatStrFormatter("%x"))
        plt.ylim(0, 1)
        plt.xlabel("Hypotheses de cle")
        plt.ylabel("Correlation")
        plt.show()
        # plt.savefig(os.path.join(plot_path, 'corr_vs_k_hyp_byte_'+str(byte)+'.png'))
  186 +
if __name__ == "__main__":

    # Run a first-order CPA on the first AES round with a Hamming-weight
    # leakage model, on the number of traces given on the command line.
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("nb_traces", type=int)
    args = parser.parse_args()
    nb_traces = args.nb_traces
    # BUG FIX: compute_predictions() rotates the plaintexts by a RANDOM shift
    # and returns it; that shift must be forwarded to compute_correlation()
    # so the traces are rotated identically. The original discarded it and
    # used the default shift=0, misaligning predictions and traces.
    shift = compute_predictions(leakage_model = 'hamming_weight', attacked_round = 'first', nb_traces = nb_traces)
    compute_correlation(nb_traces = nb_traces, shift = shift)
    display_results(correct_key = '0123456789abcdef123456789abcdef0')