Commit d547646229dcd7f42eeb2be1e07258ae955d6433

Authored by Brice COLOMBIER
1 parent eaefdbe47c
Exists in master

Add functions

Showing 8 changed files with 146 additions and 21 deletions

... ... @@ -4,4 +4,5 @@
4 4 *.*~
5 5 *.org
6 6 flymd.*
  7 +plot.py
check_ram.py View file @ d547646
  1 +import psutil
  2 +
  3 +import logging as log
  4 +import argparse
  5 +
def check_ram(nb_samples, sample_size, verbose=False):
    """Check whether the samples to be stored fit in the available RAM.

    Parameters
    ----------
    nb_samples : int
        Number of samples to store.
    sample_size : int
        Size of one sample, in bits (callers pass e.g. 64 for float64).
    verbose : bool
        If True, enable INFO-level logging output.

    Returns
    -------
    bool
        True if the data fits in the currently available memory,
        False otherwise (a warning is logged in that case).
    """
    if verbose:
        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
        log.info("Verbose output enabled")
    else:
        log.basicConfig(format="%(levelname)s: %(message)s")
    # psutil reports available memory in BYTES, not bits.
    available = psutil.virtual_memory().available
    log.info("{0} bytes available".format(available))
    # sample_size is given in bits: convert the total to bytes before
    # comparing, otherwise the check is pessimistic by a factor of 8.
    to_store = nb_samples * sample_size // 8
    log.info("{0} bytes to store".format(to_store))
    if to_store < available:
        return True
    log.warning("Not enough RAM available to store all the samples")
    return False
  22 +
if __name__ == "__main__":
    # Command-line entry point: print True/False depending on whether the
    # requested amount of data fits in the available RAM.
    parser = argparse.ArgumentParser(description='Check RAM availability')
    # required=True: both sizes are dereferenced in check_ram, so a missing
    # argument would otherwise crash with an obscure TypeError on None.
    parser.add_argument("--nb_samples", type=int, required=True)
    parser.add_argument("--sample_size", type=int, required=True)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    print(check_ram(args.nb_samples, args.sample_size, args.verbose))
downsample.py View file @ d547646
... ... @@ -25,6 +25,6 @@
25 25  
26 26 downsampled_traces = downsample(traces, args.factor, args.offset)
27 27  
28   - print os.path(args.traces)
29   - np.save("downsampled_"+str(args.factor)+"_"+args.traces_name, downsampled_traces)
  28 + dirname, filename = os.path.split(args.traces_name)
  29 + np.save(os.path.join(dirname, "downsampled_"+str(args.factor)+"_"+filename), downsampled_traces)
filter_highest_variance.py View file @ d547646
... ... @@ -46,5 +46,5 @@
46 46 np.save(dirname+"filtered_variance_"+str(args.ratio)+"_"+filename, filtered_variance_traces)
47 47 elif args.nsamples:
48 48 filtered_variance_traces = filter_highest_variance(traces, nsamples=args.nsamples)
49   - np.save(dirname+"filtered_variance_"+str(args.nsamples)+"_"+filename, filtered_variance_traces)
  49 + np.save(os.path.join(dirname, "filtered_variance_"+str(args.nsamples)+"_"+filename), filtered_variance_traces)
group_process.py View file @ d547646
  1 +import numpy as np
  2 +import math
  3 +
  4 +import argparse
  5 +import os
  6 +
  7 +# Possible operations
  8 +from pairwise_operation import pairwise_operation
  9 +from downsample import downsample
  10 +from filter_highest_variance import filter_highest_variance
  11 +from shorten import shorten
  12 +
def group_process(traces_name, prefix, nb_shares, function, function_args=(4,)):
    """Apply a processing function to every share of a split trace set.

    Loads, one by one, the files named "<prefix>_<i>_<traces_name>" for
    i in [0, nb_shares), and yields the result of ``function`` on each.

    Parameters
    ----------
    traces_name : str
        Base file name of the trace set (e.g. "masked_traces.npy").
    prefix : str
        Prefix used when the traces were split (e.g. "split").
    nb_shares : int
        Number of share files to process.
    function : callable
        Processing function; called as ``function(traces, *function_args)``.
    function_args : tuple
        Extra positional arguments for ``function``. Defaults to ``(4,)``,
        preserving the previous hard-coded behavior.

    Yields
    ------
    The value returned by ``function`` for each share, in share order.
    """
    for share in range(nb_shares):
        traces = np.load(prefix + "_" + str(share) + "_" + traces_name)
        yield function(traces, *function_args)
  17 +
if __name__ == "__main__":
    # Example driver: downsample each share of a split masked trace set and
    # save every processed share next to the current working directory.
    filename = "masked_traces.npy"
    prefix = "split"
    for counter, processed_trace in enumerate(group_process(filename, prefix, 4, downsample)):
        # Debug print removed; only persist the processed share.
        np.save("processed_" + prefix + "_" + str(counter) + "_" + filename, processed_trace)
pairwise_operation.py View file @ d547646
1 1 # Imports for processing
2 2 import numpy as np
3 3 from skimage.util.shape import view_as_windows
  4 +import check_ram
4 5  
5 6 # Imports for parallel processing
6 7 from multiprocessing import Pool, current_process
... ... @@ -10,6 +11,7 @@
10 11 # Imports for script utilitaries
11 12 import logging as log
12 13 import argparse
  14 +import os
13 15  
14 16 # Imports for dev
15 17 import cProfile
... ... @@ -62,6 +64,10 @@
62 64 if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
63 65 preprocessed_trace_length+=1
64 66 # Allocate memory for the preprocessed trace
  67 + if not check_ram.check_ram(preprocessed_trace_length, 64):
  68 + raise MemoryError("The preprocessed trace cannot be stored in RAM")
  69 + else:
  70 + log.info('This will occupy {0} Mo in RAM'.format(round(preprocessed_trace_length/(1024*1024))))
65 71 preprocessed_trace = np.zeros((preprocessed_trace_length, nb_traces), dtype=dtype)
66 72 current_index = 0
67 73 indexes = np.zeros((preprocessed_trace_length),dtype='i,i')
... ... @@ -76,7 +82,8 @@
76 82 # Increase the running index
77 83 current_index+=1
78 84 preprocessed_trace = np.transpose(preprocessed_trace)
79   - return preprocessed_trace, indexes
  85 + # return preprocessed_trace, indexes
  86 + return preprocessed_trace
80 87  
81 88 # Operations to perform on the pairs of samples
82 89 def multiplication(*args):
... ... @@ -96,7 +103,7 @@
96 103 parser.add_argument("--op", type=str, choices=['addition', 'multiplication', 'squared_addition', 'absolute_difference'])
97 104 parser.add_argument("--window_size", type=int)
98 105 parser.add_argument("--min_dist", type=int)
99   - parser.add_argument("--dtype", type=str)
  106 + parser.add_argument("--dtype", type=str, nargs='?', default='float64')
100 107 parser.add_argument("--ncores", type=int)
101 108 parser.add_argument('-v', '--verbose', action='store_true')
102 109 args = parser.parse_args()
103 110  
... ... @@ -115,13 +122,12 @@
115 122  
116 123  
117 124 # Generate fake data for testing purposes
118   - fake_nb_samples = 1000
119   - fake_nb_traces = 1000
120   - # test_array = np.array([xrange(i, i+data_set_width-data_set_height) for i in xrange(data_set_height)])
121   - test_array = np.random.rand(fake_nb_traces, fake_nb_samples)
122   - traces = test_array
  125 + # fake_nb_samples = 1000
  126 + # fake_nb_traces = 1000
  127 + # test_array = np.random.rand(fake_nb_traces, fake_nb_samples)
  128 + # traces = test_array
123 129 # Load traces from file
124   - # traces = np.load(args.traces_name)
  130 + traces = np.load(args.traces_name)
125 131 # Shorten the traces to split them into equally-sized chunks
126 132 shortened = 0
127 133 while int(np.shape(traces)[1] + (args.ncores - 1)*(args.window_size - 1))%args.ncores != 0:
128 134  
129 135  
130 136  
... ... @@ -146,18 +152,19 @@
146 152 start_indexes = xrange(0, nb_samples, chunk_size-args.window_size+1)
147 153 arguments = [(trace_set, args.window_size, args.min_dist, operation, dtype, start_index, args.verbose, first_chunk) for (trace_set, start_index, first_chunk) in zip(traces, start_indexes, [True]+(args.ncores-1)*[False])]
148 154 # Run the parallel computation
149   - parallel_processing_results = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
  155 + # parallel_processing_results = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
  156 + preprocessed_traces_parallel = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
150 157 # print parallel_processing_results
151   - preprocessed_traces_parallel, indexes_parallel = parallel_processing_results[::2], parallel_processing_results[1::2]
152   - preprocessed_traces_parallel = np.concatenate(preprocessed_traces_parallel, axis=1)
153   - indexes_parallel = np.concatenate(indexes_parallel, axis=1)
  158 + # preprocessed_traces_parallel, indexes_parallel = parallel_processing_results[::2], parallel_processing_results[1::2]
  159 + # preprocessed_traces_parallel = np.concatenate(preprocessed_traces_parallel, axis=1)
  160 + # indexes_parallel = np.concatenate(indexes_parallel, axis=1)
154 161  
155 162 # Compare normal and parallel processing
156   - if preprocessed_traces:
157   - if np.all(preprocessed_traces.sort()==preprocessed_traces_parallel.sort()):
158   - if np.all(indexes.sort()==indexes_parallel.sort()):
159   - print "###\nGreat, sequential and\nparallel processing\nreturned the same result\n###"
  163 + # if preprocessed_traces:
  164 + # if np.all(preprocessed_traces.sort()==preprocessed_traces_parallel.sort()):
  165 + # if np.all(indexes.sort()==indexes_parallel.sort()):
  166 + # print "###\nGreat, sequential and\nparallel processing\nreturned the same result\n###"
160 167  
161 168 dirname, filename = os.path.split(args.traces_name)
162   - np.save(dirname+"pairwise_"+args.operation+"_"+filename, preprocessed_traces_parallel)
  169 + np.save(os.path.join(dirname, "pairwise_"+args.op+"_"+filename), preprocessed_traces_parallel)
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
def shorten(traces, start_index, stop_index):
    """Restrict every trace to the sample range [start_index, stop_index).

    ``traces`` is a 2-D array (traces x samples); a view on the selected
    column range is returned, rows untouched.
    """
    window = slice(start_index, stop_index)
    return traces[:, window]
  8 +
if __name__ == "__main__":

    # Parsing arguments
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("--start_index", type=int)
    parser.add_argument("--stop_index", type=int)
    args = parser.parse_args()

    # Load traces from file (the unused fake-data generation was removed)
    traces = np.load(args.traces_name)

    shortened_traces = shorten(traces, args.start_index, args.stop_index)

    # Prefix the output name like the sibling scripts (downsample.py,
    # split.py); the previous suffix form produced names such as
    # "traces.npy_shortened.npy".
    dirname, filename = os.path.split(args.traces_name)
    np.save(os.path.join(dirname, "shortened_" + filename), shortened_traces)
  1 +import numpy as np
  2 +import math
  3 +
  4 +import argparse
  5 +import os
  6 +
def split(traces, nb_shares):
    """Split a trace set into ``nb_shares`` equal chunks along axis 0.

    Thin wrapper around ``np.split``; raises if the number of traces is
    not evenly divisible by ``nb_shares``.
    """
    shares = np.split(traces, nb_shares)
    return shares
  9 +
if __name__ == "__main__":

    # Parsing arguments
    parser = argparse.ArgumentParser(description='Preprocess traces')
    parser.add_argument("traces_name", type=str)
    parser.add_argument("--nb_shares", type=int)
    args = parser.parse_args()

    # Load traces from file (the fake test array that was generated here was
    # dead code: it was immediately overwritten by np.load)
    traces = np.load(args.traces_name)

    # Split into nb_shares equally-sized share files
    split_traces = split(traces, args.nb_shares)

    dirname, filename = os.path.split(args.traces_name)

    # Save each share with its index embedded in the file name, so that
    # group_process.py can find them again as "split_<i>_<filename>"
    for counter, share in enumerate(split_traces):
        np.save(os.path.join(dirname, "split_" + str(counter) + "_" + filename), share)