Commit 6429772f15ab759d431a0b9f596c83bab794479d

Authored by Brice COLOMBIER
1 parent 9a5e435bdf
Exists in master

Add new functions

Showing 11 changed files with 220 additions and 21 deletions

1 1 *.*~
2 2 *.npy
  3 +*.csv
3 4 *.bin
4 5 *.txt
5 6 *.org
  1 +import numpy as np
  2 +import matplotlib.pyplot as plt
  3 +import matplotlib.patches as patches
  4 +import argparse
  5 +from numpy import genfromtxt
  6 +import os
  7 +
  8 +def plot(csv_file, original_trace_file, threshold):
  9 + csv = np.genfromtxt(csv_file)
  10 + print np.shape(csv)
  11 + ax = plt.gca()
  12 + plt.yscale('symlog')
  13 + minor_ticks = range(-100, 0, 10)+range(-9, 0, 1)+[0]+range(1, 10, 1)+range(10, 110, 10)
  14 + print minor_ticks
  15 + ax.set_yticks(minor_ticks, minor=True)
  16 + plt.ylabel("t-statistic")
  17 + plt.grid(b=True, which='minor', color='#C0C0C0', linestyle='--', zorder=0)
  18 + plt.grid(b=True, which='major', color='k', linestyle='-', zorder=1)
  19 + plt.xlim(0, np.shape(csv)[0])
  20 + plt.plot(csv, zorder=5, color='#666666')
  21 + plt.plot(np.shape(csv)[0]*[4.5], color='red', zorder=10)
  22 + plt.plot(np.shape(csv)[0]*[-4.5], color='red', zorder=10)
  23 + dirname, filename = os.path.split(csv_file)
  24 + filename, extension = os.path.splitext(filename)
  25 + plt.savefig(os.path.join(dirname, "t-statistic.png"))
  26 + plt.show()
  27 + plt.close()
  28 + plt.xlim(0, np.shape(csv)[0])
  29 + pois = []
  30 + for index, t_val in enumerate(csv):
  31 + if abs(t_val) > threshold:
  32 + pois.append(index)
  33 + traces = np.load(original_trace_file)
  34 + plt.plot(traces[0], linewidth = 0.5)
  35 + plt.scatter(pois, traces[0,pois], marker = 'x', color = 'red', zorder=5)
  36 + plt.savefig(os.path.join(dirname, "highlight_"+str(threshold)+"_on_trace.png"))
  37 + plt.show()
  38 + plt.close()
  39 +
  40 +if __name__ == "__main__":
  41 +
  42 + # Parsing arguments
  43 + parser = argparse.ArgumentParser(description='Preprocess traces')
  44 + parser.add_argument("csv_file", type=str)
  45 + parser.add_argument("original_trace_file", type=str)
  46 + parser.add_argument("-t", "--threshold", type=float)
  47 + args = parser.parse_args()
  48 + plot(args.csv_file, args.original_trace_file, args.threshold)
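
A minimal sketch of the thresholding step performed by the script above, assuming a 1-D t-statistic array; the values and threshold below are illustrative, and the np.where call selects the same indices as the explicit loop over csv:

import numpy as np

# Illustrative t-statistic values; in the script they are read with np.genfromtxt(csv_file)
t_stat = np.array([0.3, -5.1, 4.7, 1.2, -0.8, 6.0])
threshold = 4.5

# Same selection as the explicit loop: keep indices whose |t| exceeds the threshold
pois = np.where(np.abs(t_stat) > threshold)[0]
print(list(pois))  # [1, 2, 5]
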
dispatch_against_plaintexts.py
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
  6 +def dispatch_against_plaintexts(plaintexts, ref_plaintext):
  7 + print "Plaintexts:", np.shape(plaintexts)
  8 + ref_plaintext = ref_plaintext.replace('0x', '')
  9 + ref_plaintext = [ref_plaintext[i:i+2] for i in range(0, len(ref_plaintext), 2)]
  10 + ref_plaintext = np.array([int(hex_val, 16) for hex_val in ref_plaintext])
  11 + print "Ref ", ref_plaintext
  12 + print "======================"
  13 + for i in range(15):
  14 + print plaintexts[i]
  15 + # int_array_ref_plaintext =
  16 + indexes_match_plaintext = np.equal(plaintexts, ref_plaintext[None,:])[:,0]
  17 + indexes_non_match_plaintext = np.not_equal(plaintexts, ref_plaintext[None,:])[:,0]
  18 + indexes_match_plaintext = np.where(indexes_match_plaintext == True)[0]
  19 + indexes_non_match_plaintext = np.where(indexes_non_match_plaintext == True)[0]
  20 + return indexes_match_plaintext, indexes_non_match_plaintext
  21 +
  22 +if __name__ == "__main__":
  23 +
  24 + # Parsing arguments
  25 + parser = argparse.ArgumentParser(description='Dispatch against plaintexts')
  26 + parser.add_argument("plaintexts_name", type=str)
  27 + parser.add_argument("-r", "--ref_plaintext", type=str, default='0xda39a3ee5e6b4b0d3255bfef95601890')
  28 + args = parser.parse_args()
  29 +
  30 + plaintexts = np.load(args.plaintexts_name)
  31 +
  32 + indexes_match_plaintext, indexes_non_match_plaintext = dispatch_against_plaintexts(plaintexts, args.ref_plaintext)
  33 +
  34 + dirname, filename = os.path.split(args.plaintexts_name)
  35 + filename, extension = os.path.splitext(filename)
  36 +
  37 + np.savetxt(os.path.join(dirname, filename+".output_0"), indexes_non_match_plaintext, fmt='%0i')
  38 + np.savetxt(os.path.join(dirname, filename+".output_1"), indexes_match_plaintext, fmt='%0i')
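
A minimal sketch of the plaintext parsing and comparison in dispatch_against_plaintexts, reusing the script's default reference value; the 4x16 plaintext array is illustrative, and the np.all variant shown is a full-row match rather than the first-column test used in the function above:

import numpy as np

# Hex-encoded reference plaintext, in the same format as the --ref_plaintext default
ref = '0xda39a3ee5e6b4b0d3255bfef95601890'
ref = ref.replace('0x', '')
# Split into two-character bytes, then convert each pair to an integer, as in the function above
ref_bytes = np.array([int(ref[i:i + 2], 16) for i in range(0, len(ref), 2)])
print(ref_bytes.shape)  # (16,)

# Broadcast comparison against a 2-D plaintext array; np.all over axis 1 flags full-row matches
plaintexts = np.zeros((4, 16), dtype=int)
plaintexts[2] = ref_bytes
matches = np.all(np.equal(plaintexts, ref_bytes[None, :]), axis=1)
print(np.where(matches)[0])  # [2]
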
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
  6 +def keep(traces, start_index, number):
  7 + return traces[start_index:number,:]
  8 +
  9 +if __name__ == "__main__":
  10 +
  11 + # Parsing arguments
  12 + parser = argparse.ArgumentParser(description='Preprocess traces')
  13 + parser.add_argument("traces_name", type=str)
  14 + parser.add_argument("--start_index", type=int, default=0)
  15 + parser.add_argument("-n", "--number", type=int)
  16 + args = parser.parse_args()
  17 +
  18 + fake_nb_samples = 10
  19 + fake_nb_traces = 2
  20 +
  21 + test_array = np.random.random_integers(10, size=(fake_nb_traces, fake_nb_samples))
  22 + # traces = test_array
  23 + # Load traces from file
  24 + traces = np.load(args.traces_name)
  25 +
  26 + keeped_traces = keep(traces, args.start_index, args.number)
  27 +
  28 + dirname, filename = os.path.split(args.traces_name)
  29 + filename, extension = os.path.splitext(filename)
  30 +
  31 + np.save(os.path.join(dirname, "kept_"+str(args.start_index)+"_"+str(args.number)+"_"+filename+extension), keeped_traces)
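
A minimal sketch of the slice performed by keep() on an illustrative array; note that with traces[start_index:number, :] the --number argument behaves as an end row index, not a count of traces:

import numpy as np

# Illustrative trace matrix: 5 traces of 4 samples each
traces = np.arange(20).reshape(5, 4)

# keep(traces, start_index=1, number=3) evaluates traces[1:3, :]
kept = traces[1:3, :]
print(kept.shape)  # (2, 4)
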
... ... @@ -6,7 +6,8 @@
6 6  
7 7 def merge(*arg):
8 8 arg = arg[0] # Get first tuple element
9   - final_trace = np.concatenate([np.load(i) for i in arg], axis=0)
  9 + print arg
  10 + final_trace = np.vstack([np.load(i) for i in arg])
10 11 print np.shape(final_trace)
11 12 return final_trace
12 13  
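
A minimal sketch of the new vstack-based merge, using in-memory arrays instead of the .npy files loaded inside merge(); the shapes are illustrative:

import numpy as np

# Two illustrative trace sets with the same number of samples per trace
a = np.ones((3, 100))
b = np.zeros((2, 100))

merged = np.vstack([a, b])
print(np.shape(merged))  # (5, 100)

# For 2-D inputs this gives the same result as the previous concatenate call
assert np.array_equal(merged, np.concatenate([a, b], axis=0))
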
npy_to_bin.py
... ... @@ -8,8 +8,8 @@
8 8  
9 9 dirname, filename = os.path.split(traces_name)
10 10 filename, extension = os.path.splitext(filename)
11   -
12   - traces.astype(output_format).tofile(os.path.join(dirname, filename)+'.bin')
  11 +
  12 + traces.astype(output_format).tofile(os.path.join(dirname, filename)+".bin")
13 13  
14 14 if __name__ == "__main__":
15 15  
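
A minimal sketch of the astype(...).tofile(...) call touched above and of reading the flat binary back; the file name, shape and dtype are illustrative:

import numpy as np

traces = np.random.rand(4, 256)   # illustrative float64 traces
output_format = 'float32'         # illustrative target dtype

# Same conversion as in npy_to_bin: cast, then dump as flat binary
traces.astype(output_format).tofile('traces.bin')

# tofile() stores no shape or dtype metadata, so both must be supplied when reading back
flat = np.fromfile('traces.bin', dtype=output_format)
print(flat.reshape(4, 256).shape)  # (4, 256)
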
npy_to_hex.py
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
  6 +def npy_to_hex(plaintexts_name):
  7 + plaintexts = np.load(plaintexts_name)
  8 +
  9 + print plaintexts.dtype
  10 +
  11 + dirname, filename = os.path.split(plaintexts_name)
  12 + filename, extension = os.path.splitext(filename)
  13 +
  14 + with open(os.path.join(dirname, filename+".hex"), 'w') as target_file:
  15 + if len(np.shape(plaintexts)) == 1: # There is only one plaintext
  16 + first_byte = '0x'+hex(plaintexts[0])[2:].zfill(2).upper()
  17 + other_bytes = ''.join([hex_val[2:].zfill(2).upper() for hex_val in map(hex, plaintexts)[1:]])
  18 + target_file.write(first_byte+other_bytes+"\n")
  19 + elif len(np.shape(plaintexts)) == 2: # There are multiple plaintexts
  20 + for plaintext in plaintexts:
  21 + first_byte = '0x'+hex(plaintext[0])[2:].zfill(2).upper()
  22 + other_bytes = ''.join([hex_val[2:].zfill(2).upper() for hex_val in map(hex, plaintext)[1:]])
  23 + target_file.write(first_byte+other_bytes+"\n")
  24 + else:
  25 + raise ValueError("Invalid file, cannot handle such dimensions")
  26 +
  27 +if __name__ == "__main__":
  28 +
  29 + # Parsing arguments
  30 + parser = argparse.ArgumentParser(description='Preprocess plaintexts')
  31 + parser.add_argument("plaintexts_name", type=str)
  32 + args = parser.parse_args()
  33 +
  34 + npy_to_hex(args.plaintexts_name)
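
A minimal sketch of the per-plaintext formatting done in npy_to_hex, on an illustrative 1-D byte array; str.format is used here as a compact equivalent of the hex()/zfill()/upper() chain above:

import numpy as np

# Illustrative 4-byte plaintext
plaintext = np.array([218, 57, 163, 238], dtype=np.uint8)

# '0x' prefix, then every byte as two upper-case hex digits
line = '0x' + ''.join('{0:02X}'.format(int(b)) for b in plaintext)
print(line)  # 0xDA39A3EE
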
npy_to_raw.py
... ... @@ -3,24 +3,25 @@
3 3 import argparse
4 4 import os
5 5  
6   -def npy_to_raw(traces_name):
  6 +def npy_to_raw(traces_name, new_type):
7 7 traces = np.load(traces_name)
8 8  
9   - print traces
10   -
  9 + print "Original type :", traces.dtype
  10 + print "New chosen type:", new_type
11 11 dirname, filename = os.path.split(traces_name)
12 12 filename, extension = os.path.splitext(filename)
13 13  
14   - traces.tofile(os.path.join(dirname, filename)+'.raw',
15   - sep = "",
16   - format = "%s")
  14 + traces.astype(new_type).tofile(os.path.join(dirname, filename)+'.raw',
  15 + sep = "",
  16 + format = "%s")
17 17  
18 18 if __name__ == "__main__":
19 19  
20 20 # Parsing arguments
21 21 parser = argparse.ArgumentParser(description='Preprocess traces')
22 22 parser.add_argument("traces_name", type=str)
  23 + parser.add_argument("-t", "--new_type", type=str)
23 24 args = parser.parse_args()
24 25  
25   - npy_to_raw(args.traces_name)
  26 + npy_to_raw(args.traces_name, args.new_type)
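
A minimal sketch of what the new --new_type argument changes: astype() sets the per-sample width, so the size of the written .raw file scales with the itemsize of the chosen dtype (file names and shapes are illustrative):

import numpy as np
import os

traces = np.random.rand(2, 1000)                    # float64: 8 bytes per sample

traces.tofile('traces_f64.raw')                     # 2 * 1000 * 8 bytes
traces.astype('float32').tofile('traces_f32.raw')   # 2 * 1000 * 4 bytes

print(os.path.getsize('traces_f64.raw'))  # 16000
print(os.path.getsize('traces_f32.raw'))  # 8000
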
pairwise_operation.py
... ... @@ -25,6 +25,7 @@
25 25 operation,
26 26 dtype,
27 27 start_index=0,
  28 + record_pairs=False,
28 29 verbose=False,
29 30 first_chunk=True):
30 31  
... ... @@ -58,10 +59,13 @@
58 59 log.info("Processing {0} traces of {1} samples".format(nb_traces, nb_samples))
59 60  
60 61 preprocessed_trace_length = 0
61   - for current_distance in range(minimum_distance, window_size):
62   - for current_start_index in range(nb_samples - current_distance):
63   - if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
64   - preprocessed_trace_length+=1
  62 + with open('records_pairs_w{0}_d{1}.csv'.format(window_size, minimum_distance), 'w') as record_pairs_file:
  63 + record_pairs_file.write("current_index, current_start_index, current_distance\n")
  64 + for current_distance in range(minimum_distance, window_size):
  65 + for current_start_index in range(nb_samples - current_distance):
  66 + if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
  67 + preprocessed_trace_length+=1
  68 + record_pairs_file.write("{0}, {1}, {2}\n".format(preprocessed_trace_length, current_start_index, current_distance))
65 69 log.info("Original traces occupying {0} Mbytes".format(traces.nbytes/1000000))
66 70 increase = preprocessed_trace_length/np.shape(traces)[1]
67 71 log.info("Preprocessed traces will occupy {0}x more memory".format(increase))
... ... @@ -80,10 +84,6 @@
80 84 preprocessed_trace[current_index,:] = np.transpose(value)[:,0]
81 85 indexes[current_index] = (start_index+current_start_index, start_index+current_start_index+current_distance)
82 86 # Increase the running index
83   - # if current_index in []:
84   - # print current_index
85   - # print current_start_index
86   - # print current_distance
87 87 current_index+=1
88 88 preprocessed_trace = np.transpose(preprocessed_trace)
89 89 # return preprocessed_trace, indexes
... ... @@ -109,6 +109,7 @@
109 109 parser.add_argument("-d", "--min_dist", type=int)
110 110 parser.add_argument("-t", "--dtype", type=str, nargs='?', default='float64')
111 111 parser.add_argument("-n", "--ncores", type=int)
  112 + parser.add_argument("-r", "--record_pairs", action='store_true', default=False)
112 113 parser.add_argument('-v', '--verbose', action='store_true')
113 114 args = parser.parse_args()
114 115  
115 116  
... ... @@ -159,9 +160,15 @@
159 160 # Create the list of arguments
160 161 start_indexes = range(0, nb_samples, chunk_size-args.window_size+1)
161 162 arguments = [(trace_set, args.window_size, args.min_dist, operation, dtype, start_index, args.verbose, first_chunk) for (trace_set, start_index, first_chunk) in zip(traces, start_indexes, [True]+(args.ncores-1)*[False])]
  163 + print arguments
162 164 # Run the parallel computation
163 165 # parallel_processing_results = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
164   - preprocessed_traces_parallel = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
  166 + if args.ncores > 1:
  167 + preprocessed_traces_parallel = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
  168 + elif args.ncores == 1:
  169 + preprocessed_traces_parallel = multi_run_wrapper(arguments[0])
  170 + else:
  171 + raise ValueError("You must have more than 0 cores")
165 172 # print parallel_processing_results
166 173 # preprocessed_traces_parallel, indexes_parallel = parallel_processing_results[::2], parallel_processing_results[1::2]
167 174 # preprocessed_traces_parallel = np.concatenate(preprocessed_traces_parallel, axis=1)
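
A minimal sketch of the pair enumeration that the new recording loop writes to records_pairs_w{w}_d{d}.csv, for a single first chunk; the window size, minimum distance and sample count below are illustrative:

# Enumerate the (start index, distance) sample pairs combined by the preprocessing,
# mirroring the nested loops that size preprocessed_trace_length and fill the CSV
nb_samples = 6
window_size = 4
minimum_distance = 1
first_chunk = True

pairs = []
for current_distance in range(minimum_distance, window_size):
    for current_start_index in range(nb_samples - current_distance):
        if first_chunk or current_start_index + current_distance >= window_size - 1:
            pairs.append((current_start_index, current_distance))

print(len(pairs))  # 12 pairs for this toy case, i.e. the preprocessed trace length
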
... ... @@ -21,7 +21,7 @@
21 21 for i in range(nb_subplots):
22 22 axarr[i].plot((traces[i,:]), color='#666666')
23 23 elif nb_subplots == 1:
24   - plt.plot(traces[0,:], zorder=-1, color='#666666')
  24 + plt.plot(traces[0,:], zorder=-1, color='#666666', linewidth = 0.5)
25 25 plt.scatter([poi[0] for poi in points_of_interest], [traces[0,poi[0]] for poi in points_of_interest], color='green', zorder=1)
26 26 plt.scatter([poi[0]+poi[1] for poi in points_of_interest], [traces[0,poi[0]+poi[1]] for poi in points_of_interest], color='red', zorder=1)
27 27 for poi in points_of_interest:
... ... @@ -34,7 +34,7 @@
34 34 # Parsing arguments
35 35 parser = argparse.ArgumentParser(description='Preprocess traces')
36 36 parser.add_argument("traces_file", type=str)
37   - parser.add_argument("-n", "--nb_subplots", type=int)
  37 + parser.add_argument("-n", "--nb_subplots", type=int, default=1)
38 38 args = parser.parse_args()
39 39 plot(args.traces_file, args.nb_subplots)
scale_float_to_uinttype.py
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
  6 +def scale_float_to_uinttype(traces, uinttype):
  7 + if uinttype not in ['uint8', 'uint16', 'uint32', 'uint64']:
  8 + raise TypeError ('Cannot rescale to this type, choose an unsigned integer (uintXX) type instead')
  9 + new_max = np.iinfo(uinttype).max
  10 + new_min = np.iinfo(uinttype).min
  11 + prev_max = np.amax(traces)
  12 + prev_min = np.amin(traces)
  13 + traces -= prev_min
  14 + offset_max = np.amax(traces)
  15 + scale_factor = float(new_max)/offset_max
  16 + traces *= scale_factor
  17 + # print "Achievable new min :", np.iinfo(uinttype).min
  18 + # print "Achievable new max :", np.iinfo(uinttype).max
  19 + # print "Actual new min :", np.amin(scaled_traces)
  20 + # print "Actual new max :", np.amax(scaled_traces)
  21 + return traces.astype(uinttype)
  22 +
  23 +if __name__ == "__main__":
  24 +
  25 + # Parsing arguments
  26 + parser = argparse.ArgumentParser(description='Preprocess traces')
  27 + parser.add_argument("traces_name", type=str)
  28 + parser.add_argument("--uinttype", type=str)
  29 + args = parser.parse_args()
  30 +
  31 + traces = np.load(args.traces_name)
  32 +
  33 + scaled_traces = scale_float_to_uinttype(traces, args.uinttype)
  34 +
  35 + dirname, filename = os.path.split(args.traces_name)
  36 + filename, extension = os.path.splitext(filename)
  37 +
  38 + np.save(os.path.join(dirname, filename+"_as_"+args.uinttype+extension), scaled_traces)
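
A minimal sketch of the rescaling done by scale_float_to_uinttype(): shift the traces so their minimum becomes 0, then multiply by new_max / offset_max so the maximum reaches the top of the target unsigned range; the input values are illustrative:

import numpy as np

# Illustrative float traces spanning [-1.0, 2.0]
traces = np.array([[-1.0, 0.0, 1.0],
                   [-0.5, 0.5, 2.0]])

uinttype = 'uint8'
new_max = np.iinfo(uinttype).max              # 255

shifted = traces - np.amin(traces)            # minimum becomes 0
scaled = shifted * (float(new_max) / np.amax(shifted))
result = scaled.astype(uinttype)

print('{0} {1}'.format(result.min(), result.max()))  # 0 255
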