Commit fc8e9fac1413ffa414bb6458c25cc0a3a394394c

Authored by Brice COLOMBIER
1 parent 9be20683c0
Exists in master

Add new functions

Showing 11 changed files with 361 additions and 3 deletions

add_correlated_noise_SNR.py View file @ fc8e9fa
  1 +import numpy as np
  2 +import matplotlib.pyplot as plt
  3 +
  4 +import argparse
  5 +import os
  6 +
def add_correlated_noise_SNR(traces_names, SNR, ratio):
    """Stack several trace sets and add Gaussian noise scaled from a dB SNR.

    Parameters
    ----------
    traces_names : list of str
        Paths to .npy files, each holding a (nb_traces, nb_samples) array.
        All sets must share the same shape.
    SNR : int or float
        Noise level in dB: noise std = signal std * 10**(SNR/20).
        NOTE(review): larger SNR yields MORE noise here — confirm the sign
        convention with callers (a classical SNR would use 10**(-SNR/20)).
    ratio : float in [0, 1]
        Fraction of the noise that is identical (correlated) across sets;
        the remaining (1 - ratio) fraction is drawn independently per set.

    Returns
    -------
    ndarray of shape (nb_traces, nb_samples, len(traces_names))
        Noisy copies of the input sets.
    """
    nb_sets = len(traces_names)
    for set_index, traces_name in enumerate(traces_names):
        traces = np.load(traces_name)  # load once instead of three times
        if set_index == 0:
            nb_traces, nb_samples = traces.shape
            noisy_traces = np.zeros((nb_traces, nb_samples, nb_sets))
        noisy_traces[..., set_index] = traces
    std_signal = np.std(noisy_traces)
    # Float division: the original int(SNR/20) truncated the dB exponent,
    # collapsing e.g. SNR=10 and SNR=19 to the same noise level.
    std_noise = std_signal * 10 ** (SNR / 20.0)
    noise = np.random.normal(0, std_noise, noisy_traces.shape)
    # Correlated part: the first set's noise slice duplicated across sets.
    correlated_noise = np.repeat(
        (noise * ratio)[:, :, 0][..., np.newaxis], nb_sets, axis=2)
    uncorrelated_noise = noise * (1 - ratio)
    # (Removed stray matplotlib debug plotting: nothing ever called show().)
    return noisy_traces + correlated_noise + uncorrelated_noise
  30 +
if __name__ == "__main__":

    def restricted_ratio(x):
        # argparse type-checker: accept only floats within [0.0, 1.0].
        value = float(x)
        if not 0.0 <= value <= 1.0:
            raise argparse.ArgumentTypeError("Ratio {} not in range [0.0, 1.0]".format(value))
        return value

    # Command-line interface
    parser = argparse.ArgumentParser(description='Add correlated noise to multiple sets of traces, assuming there was no noise in the first place')
    parser.add_argument("traces_names", nargs='+', type=str)
    parser.add_argument("-n", "--SNR", type=int)
    parser.add_argument("-r", "--ratio", type=restricted_ratio)
    args = parser.parse_args()

    noisy_traces = add_correlated_noise_SNR(args.traces_names, args.SNR, args.ratio)

    # Write each noisy set next to its source file.
    for set_idx, source_path in enumerate(args.traces_names):
        dirname, filename = os.path.split(source_path)
        filename, _ = os.path.splitext(filename)
        output_name = "noisy_SNR{}_ratio{}_{}".format(args.SNR, args.ratio, filename)
        np.save(os.path.join(dirname, output_name), noisy_traces[..., set_idx])
add_correlated_noise_variance.py View file @ fc8e9fa
  1 +import numpy as np
  2 +import matplotlib.pyplot as plt
  3 +
  4 +import argparse
  5 +import os
  6 +
def add_correlated_noise_variance(traces_names, variance, ratio):
    """Stack several trace sets and add Gaussian noise of fixed amplitude,
    partially correlated across sets.

    Parameters
    ----------
    traces_names : list of str
        Paths to .npy files, each of shape (nb_traces, nb_samples);
        all sets must share the same shape.
    variance : float
        Passed to np.random.normal as the scale argument.
        NOTE(review): np.random.normal expects a *standard deviation*, not a
        variance — confirm which the callers actually intend.
    ratio : float in [0, 1]
        Fraction of the noise shared (identical) across sets; the remaining
        (1 - ratio) is drawn independently per set.

    Returns
    -------
    ndarray of shape (nb_traces, nb_samples, len(traces_names))
        Noisy copies of the input sets.
    """
    nb_sets = len(traces_names)
    for set_index, traces_name in enumerate(traces_names):
        traces = np.load(traces_name)  # load once instead of three times
        if set_index == 0:
            nb_traces, nb_samples = traces.shape
            noisy_traces = np.zeros((nb_traces, nb_samples, nb_sets))
        noisy_traces[..., set_index] = traces
    noise = np.random.normal(0, variance, noisy_traces.shape)
    # Correlated part: the first set's noise slice duplicated across sets.
    correlated_noise = np.repeat(
        (noise * ratio)[:, :, 0][..., np.newaxis], nb_sets, axis=2)
    uncorrelated_noise = noise * (1 - ratio)
    # (Removed stray matplotlib debug plotting: nothing ever called show().)
    return noisy_traces + correlated_noise + uncorrelated_noise
  28 +
if __name__ == "__main__":

    def restricted_ratio(x):
        # argparse type-checker: accept only floats within [0.0, 1.0].
        value = float(x)
        if not 0.0 <= value <= 1.0:
            raise argparse.ArgumentTypeError("Ratio {} not in range [0.0, 1.0]".format(value))
        return value

    # Command-line interface
    parser = argparse.ArgumentParser(description='Add correlated noise to multiple sets of traces, assuming there was no noise in the first place')
    parser.add_argument("traces_names", nargs='+', type=str)
    parser.add_argument("-v", "--variance", type=float)
    parser.add_argument("-r", "--ratio", type=restricted_ratio)
    args = parser.parse_args()

    noisy_traces = add_correlated_noise_variance(args.traces_names, args.variance, args.ratio)

    # Write each noisy set next to its source file.
    for set_idx, source_path in enumerate(args.traces_names):
        dirname, filename = os.path.split(source_path)
        filename, _ = os.path.splitext(filename)
        output_name = "noisy_variance{}_ratio{}_{}".format(args.variance, args.ratio, filename)
        np.save(os.path.join(dirname, output_name), noisy_traces[..., set_idx])
add_noise.py View file @ fc8e9fa
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
def power(signal):
    """Mean, over traces (rows), of the per-trace sum of squared samples."""
    return np.mean(np.sum(np.multiply(signal, signal), axis=1))

def add_noise(traces, variance):
    """Add white Gaussian noise to a (nb_traces, nb_samples) array and
    print power/SNR diagnostics in dB.

    NOTE(review): `variance` is passed to np.random.normal as the *standard
    deviation* (scale) — confirm which the callers intend.

    Returns a new array; `traces` itself is not modified.
    """
    nb_traces, nb_samples = np.shape(traces)

    noise = np.random.normal(0, variance, np.shape(traces))
    # Mean trace tiled back to 2-D so power() can reduce over axis 1.
    mean_trace = np.tile(np.mean(traces, axis=0), (nb_traces, 1))
    # Python 2 print statements converted to Python 3 calls below.
    print(np.shape(mean_trace))

    print(np.shape(power(traces)))
    print(" P(noisy traces) :", 10 * np.log10(power(traces)))
    print(" P(clean traces) :", 10 * np.log10(power(mean_trace)))
    print("P(included noise) :", 10 * np.log10(power(traces - mean_trace)))
    print(" P(added noise) :", 10 * np.log10(power(noise)))
    print(" SNR :", 10 * np.log10(power(mean_trace) / power(noise)))
    return traces + noise
  23 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-v", "--variance", type=float)
    args = parser.parse_args()

    noisy = add_noise(np.load(args.traces_name), args.variance)

    # Save next to the input, tagged with the noise level.
    dirname, filename = os.path.split(args.traces_name)
    stem, _ = os.path.splitext(filename)
    np.save(os.path.join(dirname, "noisy_" + str(args.variance) + "_" + stem), noisy)
bin_to_npy.py View file @ fc8e9fa
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
def bin_to_npy(traces_name, input_format, nb_rows, nb_columns):
    """Convert a raw binary trace file to .npy next to the input file.

    Parameters
    ----------
    traces_name : str
        Path to the raw binary file.
    input_format : str
        numpy dtype name of the stored samples (e.g. 'float32').
    nb_rows : int
        Number of traces stored in the file.
    nb_columns : int
        Samples per trace; any falsy value means "infer from the file
        size and nb_rows".
    """
    traces = np.fromfile(traces_name, dtype=input_format)
    if not nb_columns:
        # Infer the column count from the total number of samples.
        nb_columns = int(np.shape(traces)[0] / nb_rows)
    traces = np.reshape(traces, (nb_rows, nb_columns))
    # Python 2 print statements converted to Python 3 calls.
    print(np.shape(traces))
    dirname, filename = os.path.split(traces_name)
    filename, extension = os.path.splitext(filename)

    print(os.path.join(dirname, filename) + ".npy")
    np.save(os.path.join(dirname, filename) + ".npy", traces)
  17 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-i", "--input_format", type=str)
    parser.add_argument("-r", "--nb_rows", type=int)
    # default=False doubles as "infer the column count from the data".
    parser.add_argument("-c", "--nb_columns", type=int, default=False)
    args = parser.parse_args()

    bin_to_npy(args.traces_name,
               args.input_format,
               args.nb_rows,
               args.nb_columns)
desynchronize.py View file @ fc8e9fa
  1 +import numpy as np
  2 +import math
  3 +
  4 +import argparse
  5 +import os
  6 +
def desynchronize(traces, desynch):
    """Randomly shift each trace to the right to simulate clock jitter.

    Each row is shifted by an amount drawn from Binomial(2*desynch, 0.5)
    (mean `desynch`), padded on the left with copies of its first sample
    and truncated on the right so its length is preserved.

    NOTE: `traces` is modified in place and also returned.
    """
    nb_traces = np.shape(traces)[0]
    desynch_amounts = np.random.binomial(desynch * 2, 0.5, nb_traces)
    for i in range(len(traces)):
        amount = desynch_amounts[i]
        print(amount)  # debug trace of the per-row shift amount
        start_pad = [traces[i, 0]] * amount
        keep = np.shape(traces)[1] - amount
        traces[i, :] = np.hstack((start_pad, traces[i, :keep]))
    return traces
  17 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-d", "--desynch", type=int)
    args = parser.parse_args()

    # Load, desynchronize (in place), then save under a tagged name.
    traces = np.load(args.traces_name)
    dirname, filename = os.path.split(args.traces_name)

    result = desynchronize(traces, args.desynch)

    np.save(os.path.join(dirname, "desynch_" + str(args.desynch) + "_" + filename), result)
duplicate.py View file @ fc8e9fa
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
def duplicate(traces, number):
    """Repeat each trace (row) `number` times, preserving row order."""
    return np.asarray(traces).repeat(number, axis=0)
  8 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-n", "--number", type=int)
    args = parser.parse_args()

    duplicated = duplicate(np.load(args.traces_name), args.number)

    # Save next to the input, tagged with the duplication factor.
    dirname, filename = os.path.split(args.traces_name)
    stem, _ = os.path.splitext(filename)
    np.save(os.path.join(dirname, "dup_" + str(args.number) + "_" + stem), duplicated)
insert_nops.py View file @ fc8e9fa
  1 +import numpy as np
  2 +import math
  3 +import random
  4 +
  5 +import argparse
  6 +import os
  7 +
def insert_nops(traces, number, width, beginning, end, log):
    """Insert `number` runs of `width` NOP-like samples into every trace.

    Insertion positions are drawn uniformly without replacement in
    [beginning, end); end == -1 means "up to the last sample".  Inserted
    amplitudes are uniform in [-0.03, 0.02).  When `log` is true, the
    positions used for each trace are appended to a per-configuration
    text log file.

    Returns a new array of shape (nb_traces, nb_samples + number * width).
    """
    nb_traces, nb_samples = np.shape(traces)
    noped_traces = np.zeros((nb_traces, nb_samples + (number * width)))
    if end == -1:
        end = nb_samples
    for trace in range(nb_traces):
        # Pick nop amplitudes between -0.03 and 0.02
        nop_amp = (5 * np.random.random_sample((number * width)) - 3) / 100
        # Choose positions of insertion for the nops
        pos = random.sample(range(beginning, end), number)
        # Each chosen position receives `width` consecutive inserted samples
        pos = np.repeat(pos, width, 0)
        # Insert the nops at the positions
        noped_traces[trace] = np.insert(traces[trace], pos, nop_amp)
        if log:
            with open("log_nop_n{0}_w{1}.txt".format(number, width), "a") as logfile:
                for i in np.unique(pos):
                    print(i)  # Python 2 prints converted to Python 3 calls
                    logfile.write(str(int(i)) + ",")
                logfile.write(str(width) + "\n")
    print(np.shape(noped_traces))
    return noped_traces
  30 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-n", "--number", type=int)
    parser.add_argument("-w", "--width", type=int)
    parser.add_argument("-b", "--beginning", type=int, nargs='?', default=0)
    parser.add_argument("-e", "--end", type=int, nargs='?', default=-1)
    parser.add_argument("-l", "--log", action='store_true')
    args = parser.parse_args()

    # Load traces from file
    traces = np.load(args.traces_name)
    dirname, filename = os.path.split(args.traces_name)

    noped_traces = insert_nops(
        traces, args.number, args.width, args.beginning, args.end, args.log)

    output_name = "noped_n{0}_w{1}_{2}.npy".format(args.number, args.width, filename)
    np.save(os.path.join(dirname, output_name), noped_traces)
laplacian_eigenmap.py View file @ fc8e9fa
  1 +import numpy as np
  2 +from sklearn.manifold import SpectralEmbedding
  3 +
  4 +import argparse
  5 +import os
  6 +
def le(traces_name, nb_components, nb_cores, k_neighbors):
    """Reduce traces to `nb_components` dimensions with a Laplacian
    eigenmap (scikit-learn SpectralEmbedding) and save the result next
    to the input file.

    Parameters
    ----------
    traces_name : str
        Path to a .npy file of shape (nb_traces, nb_samples).
    nb_components : int
        Dimension of the embedding.
    nb_cores : int
        Parallel jobs handed to scikit-learn (n_jobs).
    k_neighbors : int
        Neighbourhood size for the affinity graph (n_neighbors).
    """
    # Python 2 print statements converted to Python 3 calls.
    print("Loading data")
    traces = np.load(traces_name)

    dirname, filename = os.path.split(traces_name)
    filename, extension = os.path.splitext(filename)

    print("Performing the embedding")
    # Renamed local: the original `le = SpectralEmbedding(...)` shadowed
    # this function's own name.
    embedding = SpectralEmbedding(n_components=nb_components, n_jobs=nb_cores, n_neighbors=k_neighbors)
    le_traces = embedding.fit_transform(traces)
    print("Saving data")
    np.save(os.path.join(dirname, "le_" + str(nb_components) + "_" + filename) + '.npy', le_traces)
  19 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("-n", "--nb_components", type=int)
    parser.add_argument("-c", "--nb_cores", type=int, default=1)
    parser.add_argument("-k", "--k_neighbors", type=int)
    args = parser.parse_args()

    le(args.traces_name,
       args.nb_components,
       args.nb_cores,
       args.k_neighbors)
  1 +import numpy as np
  2 +from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
  3 +from sklearn.externals import joblib
  4 +
  5 +import argparse
  6 +import os
  7 +
def lda(traces_name, keys_name, nb_components, target_key_byte, save_model = False, load_model = False, model = "./model.pkl"):
    """Project traces with Linear Discriminant Analysis, supervised by one
    key byte, and save the projected traces (and optionally the model).

    Parameters
    ----------
    traces_name : str
        Path to a .npy traces file of shape (nb_traces, nb_samples).
    keys_name : str
        Path to a .npy keys file; column `target_key_byte` supplies labels.
    nb_components : int
        Number of LDA components to keep.
    target_key_byte : int
        Index of the key byte used as the class label.
    save_model : bool
        When True, pickle the fitted model next to the traces.
    load_model : bool
        When True, load a previously fitted model instead of fitting.
    model : str
        Path of the model to load; rebound to the fitted estimator below.
    """
    # Python 2 print statements converted to Python 3 calls.
    print("Loading data")
    traces = np.load(traces_name)
    keys = np.load(keys_name)

    dirname, filename = os.path.split(traces_name)
    filename, extension = os.path.splitext(filename)

    if load_model:
        print("Loading model")
        # `model` is a path on entry and the fitted estimator afterwards.
        model = joblib.load(model)
    else:
        print("Building LDA object")
        # Renamed local: the original `lda = LinearDiscriminantAnalysis(...)`
        # shadowed this function's own name.
        estimator = LinearDiscriminantAnalysis(n_components=nb_components)
        print("Fitting")
        model = estimator.fit(traces, keys[:,target_key_byte])
    lda_traces = model.transform(traces)
    np.save(os.path.join(dirname, "lda_" + str(nb_components) + "_" + filename) + '.npy', lda_traces)
    if save_model:
        joblib.dump(model, os.path.join(dirname, "model_lda_" + str(nb_components) + "_" + filename) + '.pkl')
  28 +
if __name__ == "__main__":

    # Command-line interface
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("keys_name", type=str)
    parser.add_argument("-n", "--nb_components", type=int)
    parser.add_argument("-b", "--target_key_byte", type=int)
    parser.add_argument("-s", "--save_model", action='store_true', default=False)
    parser.add_argument("-l", "--load_model", action='store_true', default=False)
    parser.add_argument("-m", "--model", type=str, default="./model.pkl")
    args = parser.parse_args()

    lda(args.traces_name,
        args.keys_name,
        args.nb_components,
        args.target_key_byte,
        args.save_model,
        args.load_model,
        args.model)
pairwise_operation.py View file @ fc8e9fa
... ... @@ -74,12 +74,13 @@
74 74 preprocessed_trace = np.zeros((preprocessed_trace_length, nb_traces), dtype=dtype)
75 75 current_index = 0
76 76 indexes = np.zeros((preprocessed_trace_length),dtype='i,i')
  77 + average_trace = np.mean(traces, axis=0)
77 78 # For all possible start indices in the window
78 79 for current_distance in range(minimum_distance, window_size):
79 80 print(current_distance)
80 81 for current_start_index in range(nb_samples - current_distance):
81 82 if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
82   - value = np.array(operation(traces[:,current_start_index], traces[:,current_start_index+current_distance]), ndmin=2)
  83 + value = np.array(operation(traces[:,current_start_index], traces[:,current_start_index+current_distance], average_trace[current_start_index], average_trace[current_start_index+current_distance]), ndmin=2)
83 84 # Store the resulting vector
84 85 preprocessed_trace[current_index,:] = np.transpose(value)[:,0]
85 86 indexes[current_index] = (start_index+current_start_index, start_index+current_start_index+current_distance)
86 87  
... ... @@ -98,13 +99,15 @@
98 99 return (args[0] + args[1])*(args[0] + args[1])
99 100 def absolute_difference(*args):
100 101 return abs(args[0] - args[1])
  102 +def centered_product(*args):
  103 + return (args[0] - args[2]) * (args[1] - args[3])
101 104  
102 105 if __name__ == "__main__":
103 106  
104 107 # Parsing arguments
105 108 parser = argparse.ArgumentParser(description='Preprocess traces')
106 109 parser.add_argument("traces_name", type=str)
107   - parser.add_argument("-o", "--op", type=str, choices=['addition', 'multiplication', 'squared_addition', 'absolute_difference'])
  110 + parser.add_argument("-o", "--op", type=str, choices=['addition', 'multiplication', 'squared_addition', 'absolute_difference', 'centered_product'])
108 111 parser.add_argument("-w", "--window_size", type=int)
109 112 parser.add_argument("-d", "--min_dist", type=int)
110 113 parser.add_argument("-t", "--dtype", type=str, nargs='?', default='float64')
... ... @@ -117,6 +120,7 @@
117 120 elif args.op == 'addition': operation = addition
118 121 elif args.op == 'squared_addition': operation = squared_addition
119 122 elif args.op == 'absolute_difference': operation = absolute_difference
  123 + elif args.op == 'centered_product': operation = centered_product
120 124 dtype = np.dtype(args.dtype).type
121 125 if args.verbose:
122 126 log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
... ... @@ -23,7 +23,7 @@
23 23 pca_traces = model.transform(traces)
24 24 np.save(os.path.join(dirname, "pca_"+str(nb_components)+"_"+filename)+'.npy', pca_traces)
25 25 if save_model:
26   - joblib.dump(model, os.path.join(dirname, "model_"+str(nb_components)+"_"+filename)+'.pkl')
  26 + joblib.dump(model, os.path.join(dirname, "model_pca_"+str(nb_components)+"_"+filename)+'.pkl')
27 27  
28 28 if __name__ == "__main__":
29 29