Commit 9a5e435bdf03e8325ff6c73b12e6518b8fa9127e

Authored by Brice COLOMBIER
1 parent 1929d31358
Exists in master

Add a function to turn the Fortran flag of a NumPy array to False

Showing 5 changed files with 46 additions and 20 deletions

.gitignore
... ... @@ -7,4 +7,5 @@
7 7 flymd.*
8 8 test*.*
9 9 *.png
  10 +traces/
contiguify.py
  1 +import numpy as np
  2 +
  3 +import argparse
  4 +import os
  5 +
  6 +def contiguify(traces_names):
  7 + for traces_name in traces_names:
  8 + print "Processing file {0}".format(traces_name)
  9 + traces = np.load(traces_name)
  10 +
  11 + dirname, filename = os.path.split(traces_name)
  12 + filename, extension = os.path.splitext(filename)
  13 +
  14 + np.save(os.path.join(dirname, "contiguous_" + filename + ".npy"), np.ascontiguousarray(traces))
  15 +
  16 +if __name__ == "__main__":
  17 +
  18 + # Parsing arguments
  19 + parser = argparse.ArgumentParser(description='Preprocess traces')
  20 + parser.add_argument("traces_names", type=str, nargs='+')
  21 + args = parser.parse_args()
  22 +
  23 + contiguify(args.traces_names)
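
For context, np.ascontiguousarray returns a C-ordered (row-major) copy of its input, which is what turns the Fortran flag off. A minimal sketch of the effect on the array flags, with a made-up shape:

import numpy as np

# A Fortran-ordered (column-major) array; the shape is made up
a = np.asfortranarray(np.zeros((4, 1000)))
print(a.flags['F_CONTIGUOUS'], a.flags['C_CONTIGUOUS'])  # True False

# np.ascontiguousarray returns a C-contiguous copy,
# i.e. the Fortran flag of the result is False
b = np.ascontiguousarray(a)
print(b.flags['F_CONTIGUOUS'], b.flags['C_CONTIGUOUS'])  # False True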
pairwise_operation.py
1 1 # Imports for processing
2 2 import numpy as np
3   -from skimage.util.shape import view_as_windows
  3 +# from skimage.util.shape import view_as_windows
4 4  
5 5 # Imports for parallel processing
6 6 from multiprocessing import Pool, current_process
7 7  
8 8  
9 9  
... ... @@ -58,33 +58,32 @@
58 58 log.info("Processing {0} traces of {1} samples".format(nb_traces, nb_samples))
59 59  
60 60 preprocessed_trace_length = 0
61   - for current_distance in xrange(minimum_distance, window_size):
62   - for current_start_index in xrange(nb_samples - current_distance):
  61 + for current_distance in range(minimum_distance, window_size):
  62 + for current_start_index in range(nb_samples - current_distance):
63 63 if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
64 64 preprocessed_trace_length+=1
65 65 log.info("Original traces occupying {0} Mbytes".format(traces.nbytes/1000000))
66 66 increase = preprocessed_trace_length/np.shape(traces)[1]
67 67 log.info("Preprocessed traces will occupy {0}x more memory".format(increase))
68 68 log.info("Preprocessed traces will occupy {0} Mbytes".format(increase*traces.nbytes/1000000))
69   - bla
70 69 # Allocate memory for the preprocessed trace
71 70 preprocessed_trace = np.zeros((preprocessed_trace_length, nb_traces), dtype=dtype)
72 71 current_index = 0
73 72 indexes = np.zeros((preprocessed_trace_length),dtype='i,i')
74 73 # For all possible start indices in the window
75   - for current_distance in xrange(minimum_distance, window_size):
76   - print current_distance
77   - for current_start_index in xrange(nb_samples - current_distance):
  74 + for current_distance in range(minimum_distance, window_size):
  75 + print(current_distance)
  76 + for current_start_index in range(nb_samples - current_distance):
78 77 if first_chunk or (not first_chunk and current_start_index+current_distance>=window_size-1):
79 78 value = np.array(operation(traces[:,current_start_index], traces[:,current_start_index+current_distance]), ndmin=2)
80 79 # Store the resulting vector
81 80 preprocessed_trace[current_index,:] = np.transpose(value)[:,0]
82 81 indexes[current_index] = (start_index+current_start_index, start_index+current_start_index+current_distance)
83 82 # Increase the running index
84   - if current_index in []:
85   - print current_index
86   - print current_start_index
87   - print current_distance
  83 + # if current_index in []:
  84 + # print current_index
  85 + # print current_start_index
  86 + # print current_distance
88 87 current_index+=1
89 88 preprocessed_trace = np.transpose(preprocessed_trace)
90 89 # return preprocessed_trace, indexes
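
The two passes above share the same double loop: the first pass only counts how many (start, start + distance) pairs survive the first_chunk filter so that preprocessed_trace can be preallocated, and the second pass fills it. A standalone sketch of the counting pass, with made-up sizes:

# Made-up sizes; the real values come from the loaded traces
nb_samples, window_size, minimum_distance = 8, 4, 1
first_chunk = True

preprocessed_trace_length = 0
for current_distance in range(minimum_distance, window_size):
    for current_start_index in range(nb_samples - current_distance):
        # Non-first chunks skip pairs whose second sample falls inside
        # the overlap already handled by the previous chunk
        if first_chunk or current_start_index + current_distance >= window_size - 1:
            preprocessed_trace_length += 1
print(preprocessed_trace_length)  # 7 + 6 + 5 = 18 pairs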
... ... @@ -133,7 +132,7 @@
133 132 # traces = test_array
134 133 # Load traces from file
135 134 traces = np.load(args.traces_name)
136   - print "Input:", np.shape(traces)
  135 + print("Input:", np.shape(traces))
137 136 # Shorten the traces to split them into equally-sized chunks
138 137 shortened = 0
139 138 while int(np.shape(traces)[1] + (args.ncores - 1)*(args.window_size - 1))%args.ncores != 0:
... ... @@ -141,7 +140,7 @@
141 140 shortened+=1
142 141 if shortened:
143 142 log.warning("Traces shortened by {0} samples to split them into equally-sized chunks".format(shortened))
144   - nb_samples = np.shape(traces)[1]
  143 + nb_samples = int(np.shape(traces)[1])
145 144  
146 145 # Perform non-parallel preprocessing
147 146 preprocessed_traces = []
148 147  
149 148  
... ... @@ -150,12 +149,15 @@
150 149 # Init pool of workers for parallel preprocessing
151 150 pool = Pool(args.ncores)
152 151 # Compute the size of each chunk of traces to be preprocessed
153   - chunk_size = int(np.shape(traces)[1]+(args.ncores-1)*(args.window_size-1))/args.ncores
  152 + chunk_size = (np.shape(traces)[1] + (args.ncores - 1)*(args.window_size - 1)) // args.ncores
154 153 log.info("Traces split into {0} chunks of {1} samples".format(args.ncores, chunk_size))
155 154 # Split the traces, with overlapping
156   - traces = view_as_windows(traces, (np.shape(traces)[0],chunk_size), step=chunk_size-args.window_size+1)[0]
  155 + # if args.ncores > 1:
  156 + # traces = view_as_windows(traces, (np.shape(traces)[0],chunk_size), step=chunk_size-args.window_size+1)[0]
  157 + # else:
  158 + traces = [traces]
157 159 # Create the list of arguments
158   - start_indexes = xrange(0, nb_samples, chunk_size-args.window_size+1)
  160 + start_indexes = range(0, nb_samples, chunk_size-args.window_size+1)
159 161 arguments = [(trace_set, args.window_size, args.min_dist, operation, dtype, start_index, args.verbose, first_chunk) for (trace_set, start_index, first_chunk) in zip(traces, start_indexes, [True]+(args.ncores-1)*[False])]
160 162 # Run the parallel computation
161 163 # parallel_processing_results = np.concatenate(pool.map(multi_run_wrapper, arguments), axis=1)
... ... @@ -172,6 +174,6 @@
172 174 # print "###\nGreat, sequential and\nparallel processing\nreturned the same result\n###"
173 175  
174 176 dirname, filename = os.path.split(args.traces_name)
175   - print "Final trace of dimensions:", np.shape(preprocessed_traces_parallel)
  177 + print("Final trace of dimensions:", np.shape(preprocessed_traces_parallel))
176 178 np.save(os.path.join(dirname, "pairwise_"+args.op+"_"+filename), preprocessed_traces_parallel)
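
The shortening loop and chunk_size formula above guarantee that the overlapped total length divides evenly among the workers, with consecutive chunks overlapping by window_size - 1 samples so no pair within a window is lost at a boundary (the commit comments out the actual view_as_windows split, but the sizes are still computed this way). A standalone sketch with made-up sizes:

import numpy as np

ncores, window_size = 4, 5
traces = np.zeros((10, 1003))  # 10 traces of 1003 samples, made up

# Shorten until the overlapped length divides evenly among the workers
shortened = 0
while (np.shape(traces)[1] + (ncores - 1)*(window_size - 1)) % ncores != 0:
    traces = traces[:, :-1]
    shortened += 1

nb_samples = np.shape(traces)[1]
chunk_size = (nb_samples + (ncores - 1)*(window_size - 1)) // ncores
# Consecutive chunks start window_size - 1 samples before the previous
# one ends, so every pair closer than window_size lands in some chunk
starts = range(0, nb_samples, chunk_size - window_size + 1)
print(shortened, nb_samples, chunk_size, list(starts)[:ncores])
# -> 3 1000 253 [0, 249, 498, 747]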
... ... @@ -11,7 +11,7 @@
11 11 # Parsing arguments
12 12 parser = argparse.ArgumentParser(description='Preprocess traces')
13 13 parser.add_argument("traces_name", type=str)
14   - parser.add_argument("--start_index", type=int)
  14 + parser.add_argument("--start_index", type=int, default=0)
15 15 parser.add_argument("--stop_index", type=int)
16 16 args = parser.parse_args()
17 17  
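
The added default=0 matters because argparse sets omitted optional arguments to None; with the default, an omitted --start_index behaves like "start at the beginning" instead of propagating None into index arithmetic. A minimal sketch (how the script consumes start_index is not shown in this hunk, so the closing comment is illustrative):

import argparse

parser = argparse.ArgumentParser(description='Preprocess traces')
parser.add_argument("--start_index", type=int, default=0)
parser.add_argument("--stop_index", type=int)
args = parser.parse_args([])  # nothing on the command line
print(args.start_index, args.stop_index)  # 0 None
# Slicing tolerates None (traces[args.start_index:args.stop_index] works),
# but arithmetic such as args.start_index + 1 raises TypeError on None.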
... ... @@ -12,7 +12,7 @@
12 12 # Parsing arguments
13 13 parser = argparse.ArgumentParser(description='Preprocess traces')
14 14 parser.add_argument("traces_name", type=str)
15   - parser.add_argument("--nb_shares", type=int)
  15 + parser.add_argument("-n", "--nb_shares", type=int)
16 16 args = parser.parse_args()
17 17  
18 18 fake_nb_samples = 10
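
With the added short form, -n and --nb_shares are interchangeable; argparse derives the attribute name from the long option string, so both spellings populate args.nb_shares. A minimal sketch:

import argparse

parser = argparse.ArgumentParser(description='Preprocess traces')
parser.add_argument("-n", "--nb_shares", type=int)
# Both spellings populate the same attribute:
print(parser.parse_args(["-n", "3"]).nb_shares)           # 3
print(parser.parse_args(["--nb_shares", "3"]).nb_shares)  # 3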