Commit 757796eef57f90ee3bb4f646e8a97a4afd8c4183

Authored by Brice Colombier
1 parent 6d251a84f6
Exists in master

First test of real CASCADE execution

Showing 11 changed files with 904 additions and 856 deletions

CASCADE/binary_par.py View file @ 757796e
... ... @@ -10,7 +10,7 @@
10 10 import get_parities_from_indices as gpfi
11 11  
12 12  
13   -def binary_par(reference_response, blocks_to_correct, response_on_board):
  13 +def binary_par(reference_response, blocks_to_correct, tclsh, board_manager):
14 14  
15 15 """Implementation of the BINARY algorithm found in the CASCADE protocol.
16 16  
... ... @@ -27,7 +27,8 @@
27 27 blocks_to_correct_first_half = [x[:int(len(x)/2)] for x in blocks_to_correct]
28 28 blocks_return = []
29 29 ref_rep = []
30   - parities = gpfi.get_parities_from_indices(blocks_to_correct_first_half, response_on_board)
  30 + parities = gpfi.get_parities_from_indices(blocks_to_correct_first_half, tclsh, board_manager)
  31 + print parities
31 32 if len(blocks_to_correct[0]) > 2:
32 33 for i, j in enumerate(parities):
33 34 if par.parity(reference_response[i][:block_size/2], j):
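The new binary_par signature replaces the in-memory response_on_board argument with a Tcl interpreter handle and a board manager, so the half-block parities used by BINARY are now queried from the hardware instead of being computed locally. As a rough, self-contained illustration of what this dichotomic step does (this is not the repository's binary_par, and the parity-oracle callback is an assumption standing in for the tclsh/board_manager pair):

    # Locate the single erroneous bit in a block whose overall parity disagrees
    # with the reference, by repeatedly asking for the parity of the first half
    # of the remaining index range.
    def locate_error(reference_block, query_parity):
        lo, hi = 0, len(reference_block)
        while hi - lo > 1:
            mid = (lo + hi) // 2
            ref_parity = sum(reference_block[lo:mid]) % 2
            if query_parity(range(lo, mid)) != ref_parity:
                hi = mid  # parities disagree: the error is in the first half
            else:
                lo = mid  # parities agree: the error is in the second half
        return lo

    # Simulated board response differing from the reference at index 5 only
    reference = [0, 1, 1, 0, 1, 0, 0, 1]
    board = [0, 1, 1, 0, 1, 1, 0, 1]
    query_parity = lambda idx: sum(board[i] for i in idx) % 2
    print(locate_error(reference, query_parity))  # prints 5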
CASCADE/cascade.py View file @ 757796e
... ... @@ -9,19 +9,20 @@
9 9 import binary_par as bi_par
10 10 import parity as par
11 11 import split as split
12   -import get_parities_from_indices as gpfi
13 12 import flip_bits as fb
14 13 import flatten as fl
15 14 import random as rd
16 15 import swap_blocks as sb
  16 +import get_parities_from_indices as gpfi
17 17  
18   -
19   -def cascade(reference_response, error_rate, nb_passes, response_on_board, initial_block_size=0):
  18 +def cascade(reference_response, error_rate, nb_passes, tclsh, board_manager, initial_block_size=0):
20 19  
21 20 """Implementation of the CASCADE reconciliation protocol"""
22 21  
23 22 powers = [2**i for i in range(12)] # Up to 512-bit responses
24 23 if (not len(reference_response) in powers):
  24 + print reference_response
  25 + print len(reference_response)
25 26 raise ValueError('Message length is not a power of two or is too large')
26 27  
27 28 if not initial_block_size:
... ... @@ -55,7 +56,7 @@
55 56  
56 57 split.split(reference_response, block_size)
57 58 split.split(indices, block_size)
58   - parities = gpfi.get_parities_from_indices(indices, response_on_board)
  59 + parities = gpfi.get_parities_from_indices(indices, tclsh, board_manager)
59 60 blocks_to_correct = []
60 61 reference_response_to_correct = []
61 62 for block_index, (reference_response_block, block_parity) in enumerate(zip(reference_response, parities)):
62 63  
63 64  
... ... @@ -70,11 +71,13 @@
70 71 if passe > 0:
71 72 even_parity_blocks.append(indices[block_index])
72 73 if blocks_to_correct:
  74 + print "Narrow down to single bit errors"
73 75 # Narrow down to single bit errors
74 76 while len(blocks_to_correct[0]) > 2:
75   - reference_response_to_correct, blocks_to_correct = bi_par.binary_par(reference_response_to_correct, blocks_to_correct, response_on_board)
  77 + print blocks_to_correct
  78 + reference_response_to_correct, blocks_to_correct = bi_par.binary_par(reference_response_to_correct, blocks_to_correct, tclsh, board_manager)
76 79 # Final BINARY execution where single PUF bits are queried from the board
77   - _, indices_to_flip = bi_par.binary_par(reference_response_to_correct, blocks_to_correct, response_on_board)
  80 + _, indices_to_flip = bi_par.binary_par(reference_response_to_correct, blocks_to_correct, tclsh, board_manager)
78 81 if passe > 0:
79 82 for index_to_flip in indices_to_flip:
80 83 # Move blocks from one group to the other if they contain the bit to flip
81 84  
... ... @@ -95,9 +98,9 @@
95 98 block_to_correct = min(odd_parity_blocks, key=len) # Get the smallest block
96 99 reference_response_block = [reference_response[indices.index(x)] for x in block_to_correct]
97 100 while len(block_to_correct) > 2:
98   - [reference_response_block], [block_to_correct] = bi_par.binary_par([reference_response_block], [block_to_correct], response_on_board)
  101 + [reference_response_block], [block_to_correct] = bi_par.binary_par([reference_response_block], [block_to_correct], tclsh, board_manager)
99 102 # Final BINARY execution where single PUF bits are queried from the board
100   - _, backtracked_pos = bi_par.binary_par([reference_response_block], [block_to_correct], response_on_board)
  103 + _, backtracked_pos = bi_par.binary_par([reference_response_block], [block_to_correct], tclsh, board_manager)
101 104 # backtracked_pos = bi.binary(reference_response_block, block_to_correct, response_on_board)
102 105 fb.flip_bits(reference_response, backtracked_pos, indices)
103 106 # Move blocks from one group to the other if they contain the bit to flip
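With this change cascade() takes the Tcl interpreter and the board manager instead of a local copy of the on-board response, so every parity it needs is obtained from the board. A hedged sketch of how the new signature could be driven from a plain script, mirroring the wiring used by get_response.py and perform_reconciliation below (the COM port, error rate, pass count, block size and placeholder reference response are assumptions, and a connected board is required):

    import Tkinter
    from boards_management import board_commands
    from CASCADE.cascade import cascade

    # Set up the Tcl interpreter and the board, as done elsewhere in this commit
    tclsh = Tkinter.Tcl()
    board_manager = board_commands.Board_manager()
    tclsh.eval(board_manager.source_tcl_package())
    tclsh.eval(board_manager.connect("COM5"))

    # Placeholder 128-bit reference response; in practice it comes from the
    # board, as get_PUF_reference_response does below.
    reference_response = [0, 1] * 64
    cascade(reference_response,
            0.02,            # error rate, as used by perform_reconciliation
            4,               # number of passes (assumed)
            tclsh,
            board_manager,
            8)               # initial block size (assumed)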
CASCADE/get_parities_from_indices.py View file @ 757796e
  1 +# Author: Brice Colombier
  2 +# Laboratoire Hubert Curien
  3 +# 42000 Saint-Etienne - France
  4 +# Contact: b.colombier@univ-st-etienne.fr
  5 +# Project: Demonstrator
  6 +# File: get_parities_from_indices.py
  7 +# Date : 2016-10-12
  8 +
  9 +import Tkinter
  10 +
  11 +def get_parities_from_indices(indices, tclsh, board_manager):
  12 +
  13 + """Get the parities of the on-board response blocks.
  14 +
  15 + Hardware-specific implementation.
  16 + >>> get_parities_from_indices([[1, 5], [7, 2], [3, 6], [4, 0]], tclsh, board_manager)
  17 + [0, 1, 1, 0]
  18 + """
  19 +
  20 + parities = tclsh.eval(board_manager.command_get_parities_from_indices(indices))
  21 + return [int(i) for i in list(parities.replace(" ", ""))]
  22 +
  23 +
  24 +if __name__ == "__main__":
  25 + print get_parities_from_indices()
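The Tcl script generated by command_get_parities_from_indices (see boards_management/board_commands.py below) returns the block parities as a space-separated Tcl list such as "0 1 1 0"; the comprehension above turns that string into a list of integers. A quick offline check of just that conversion, with a sample string of the shape the script returns (no board needed):

    parities = "0 1 1 0"  # sample of what the Tcl script returns
    print([int(i) for i in list(parities.replace(" ", ""))])  # [0, 1, 1, 0]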
add_frame_elements.py View file @ 757796e
... ... @@ -25,6 +25,11 @@
25 25 text="Connect",
26 26 command=self.connect)
27 27 self.com_port_button_connect.pack(side=LEFT)
  28 + self.com_port_button_reset = Button(self.motherboard_frame,
  29 + text="Reset",
  30 + state=NORMAL,
  31 + command=self.reset_board)
  32 + self.com_port_button_reset.pack(side=LEFT)
28 33 self.com_port_button_disconnect = Button(self.motherboard_frame,
29 34 text="Disconnect",
30 35 state=NORMAL,
... ... @@ -57,8 +62,9 @@
57 62 self.reconciliation_parameter_number_of_passes_spinbox.grid(row=1,
58 63 column=2)
59 64 self.perform_reconciliation_button=Button(self.CASCADE_frame,
60   - text="Perform reconciliation",
61   - state="normal")
  65 + text="Perform reconciliation",
  66 + state="normal",
  67 + command=self.perform_reconciliation)
62 68 self.perform_reconciliation_button.grid(row=0,
63 69 column=3,
64 70 rowspan=2,
... ... @@ -97,7 +103,10 @@
97 103 text="Search...",
98 104 command=self.select_file)
99 105 self.button_open_design.grid(row=0, column=3)
100   - self.choose_design_format_option_menu = OptionMenu(self.file_frame, self.design_format, "BENCH", "BENCH", "BLIF", "SLIF", "EDIF", "Xilinx EDIF", "VHDL Dataflow", "VHDL Structural", "Verilog Dataflow", "Verilog Structural")
  106 + self.choose_design_format_option_menu = OptionMenu(self.file_frame,
  107 + self.design_format,
  108 + "BENCH",
  109 + "BENCH", "BLIF", "SLIF", "EDIF", "Xilinx EDIF", "VHDL Dataflow", "VHDL Structural", "Verilog Dataflow", "Verilog Structural")
101 110 self.choose_design_format_option_menu.grid(row=1, column=1, sticky=W)
102 111 self.build_graph_button=Button(self.file_frame,
103 112 text="Build graph",
... ... @@ -23,28 +23,28 @@
23 23 import declare_pack_frames
24 24 import status_bar
25 25  
26   -
27 26 from key_derivation.blake2 import BLAKE2s
28 27  
29 28 from locking.locking import locking
30 29  
31 30 from masking.masking import masking
32 31  
33   -from parsers.build_bench import build as build_bench
34   -from parsers.build_blif import build as build_blif
35   -from parsers.build_edif import build as build_edif
36   -from parsers.build_slif import build as build_slif
37   -from parsers.build_verilog_rtl import build as build_verilog_rtl
  32 +from parsers.build_bench import build as build_bench
  33 +from parsers.build_blif import build as build_blif
  34 +from parsers.build_edif import build as build_edif
  35 +from parsers.build_slif import build as build_slif
  36 +from parsers.build_verilog_df import build as build_verilog_df
38 37 from parsers.build_verilog_struct import build as build_verilog_struct
39   -from parsers.build_vhd_rtl import build as build_vhd_rtl
40   -from parsers.build_vhd_struct import build as build_vhd_struct
41   -from parsers.build_xilinx import build as build_xilinx
  38 +from parsers.build_vhd_df import build as build_vhd_df
  39 +from parsers.build_vhd_struct import build as build_vhd_struct
  40 +from parsers.build_xilinx import build as build_xilinx
42 41  
43 42 from generate_modified_netlist.convert_back_bench import convert_back as convert_back_bench
44   -from generate_modified_netlist.convert_back_vhd import convert_back as convert_back_vhd
  43 +from generate_modified_netlist.convert_back_vhd import convert_back as convert_back_vhd
45 44  
46 45 from boards_management import board_commands
47 46  
  47 +from CASCADE.cascade import cascade
48 48  
49 49 class App:
50 50  
51 51  
... ... @@ -90,9 +90,9 @@
90 90 ("SLIF files", ".slif"),
91 91 ("EDIF files", ".edf"),
92 92 ("Xilinx EDIF files", ".edf"),
93   - ("RTL VHDL files", ".vhd"),
  93 + ("Dataflow VHDL files", ".vhd"),
94 94 ("Structural VHDL files", ".vhd"),
95   - ("RTL Verilog files", ".v"),
  95 + ("Dataflow Verilog files", ".v"),
96 96 ("Structural Verilog files", ".v")]
97 97 self.filename.set(tkFileDialog.askopenfilename(initialdir = "./user_space/",
98 98 filetypes=ftypes))
99 99  
... ... @@ -114,11 +114,11 @@
114 114 elif self.design_format.get() == "Xilinx EDIF":
115 115 self.g, self.prim_in, self.prim_out, self.nodes = build_xilinx(self.filename.get())
116 116 elif self.design_format.get() == "VHDL Dataflow":
117   - self.g, self.prim_in, self.prim_out, self.nodes = build_vhd_rtl(self.filename.get())
  117 + self.g, self.prim_in, self.prim_out, self.nodes = build_vhd_df(self.filename.get())
118 118 elif self.design_format.get() == "VHDL Structural":
119 119 self.g, self.prim_in, self.prim_out, self.nodes = build_vhd_struct(self.filename.get())
120 120 elif self.design_format.get() == "Verilog Dataflow":
121   - self.g, self.prim_in, self.prim_out, self.nodes = build_verilog_rtl(self.filename.get())
  121 + self.g, self.prim_in, self.prim_out, self.nodes = build_verilog_df(self.filename.get())
122 122 elif self.design_format.get() == "Verilog Structural":
123 123 self.g, self.prim_in, self.prim_out, self.nodes = build_verilog_struct(self.filename.get())
124 124 self.graph_info.set(str(str(len(self.nodes))+" nodes, "+
... ... @@ -151,8 +151,13 @@
151 151 def get_PUF_reference_response(self):
152 152 self.PUF_reference_response_displayed.set("")
153 153 try:
154   - self.PUF_reference_response = self.tcl_obj.eval(self.board_manager.generate_and_offload_response())
155   - self.PUF_reference_response_displayed.set(self.PUF_reference_response)
  154 + self.PUF_reference_response_displayed.set(self.tcl_obj.eval(self.board_manager.generate_and_offload_response()))
  155 + self.PUF_reference_response = self.PUF_reference_response_displayed.get()
  156 + temporary_response = ""
  157 + for i in self.PUF_reference_response:
  158 + binary = bin(int(i, 16))[2:]
  159 + temporary_response+=((4-len(binary))*"0"+binary)
  160 + self.PUF_reference_response = [int(i) for i in temporary_response]
156 161 except:
157 162 self.status.set("Reference response could not be obtained")
158 163  
... ... @@ -186,7 +191,7 @@
186 191 self.top = Toplevel()
187 192 self.top.title("License")
188 193 try:
189   - self.License_file = open("LICENSE.txt", 'r')
  194 + self.License_file = open("LICENSE", 'r')
190 195 self.License_text = self.License_file.read()
191 196 self.License_file.close()
192 197 except:
... ... @@ -248,7 +253,7 @@
248 253 def derive_key_from_response(self):
249 254 self.salt = bytes(''.join(random.SystemRandom().choice(["0", "1"]) for _ in range(32)))
250 255 PRK_f = BLAKE2s(digest_size=32, key=self.salt)
251   - PRK_f.update(self.PUF_reference_response)
  256 + PRK_f.update(self.PUF_reference_response_displayed)
252 257 self.key = ''.join('{0:08b}'.format(ord(x), 'b') for x in PRK_f.final()).replace("0b", "")
253 258 key_file_name = "./user_space/key_"+""+".txt"
254 259 with open(key_file_name, "w") as key_file:
... ... @@ -282,6 +287,18 @@
282 287 with open(filename, "w") as aw_file:
283 288 aw_file.write("Unocking word concatenated with unmasking word\n"+self.unlocking_word+self.unmasking_word)
284 289 self.message_AW_saved.set(str("Activation word saved under "+filename))
  290 +
  291 + def perform_reconciliation(self):
  292 + cascade(self.PUF_reference_response,
  293 + 0.02,
  294 + int(self.reconciliation_parameter_number_of_passes_spinbox.get()),
  295 + self.tcl_obj,
  296 + self.board_manager,
  297 + int(self.reconciliation_parameter_initial_block_size_spinbox.get()))
  298 + print ("Reconciliation done")
  299 +
  300 + def reset_board(self):
  301 + self.tcl_obj.eval(self.board_manager.reset_boards())
285 302  
286 303 def reset(self):
287 304 print "Not implemented yet"
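get_PUF_reference_response now expands the hexadecimal string returned by the board into a list of individual bits before it is handed to cascade(). A standalone check of that conversion, using a made-up two-character response (a real response comes from the board):

    response = "A3"  # sample hex string, for illustration only
    temporary_response = ""
    for i in response:
        binary = bin(int(i, 16))[2:]
        temporary_response += (4 - len(binary)) * "0" + binary
    print([int(i) for i in temporary_response])
    # [1, 0, 1, 0, 0, 0, 1, 1]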
boards_management/board_commands.py View file @ 757796e
... ... @@ -53,14 +53,18 @@
53 53 command = "getStatus $dev\n"
54 54 return command
55 55  
56   - def get_parity_from_indices(self, indices):
57   -
58   - command = "sendFabricCommand $dev 4 5\n\
59   -sendFabricCommand $dev 4 "+str(len(indices))+"\n"
60   - indices_command = "\n".join(["sendFabricCommand $dev 4 "+str(i) for i in indices])+"\n"
61   - final_command = "sendFabricCommand $dev 4 6\n\
62   -return [expr [lindex [sendFabricCommand $dev 4 0] 1] & 1]"
63   - return command+indices_command+final_command
  56 + def command_get_parities_from_indices(self, indices):
  57 +
  58 + command = ""
  59 + command+="set parities \"\"\n"
  60 + for i in indices:
  61 + command+="sendFabricCommand $dev 4 5\n\
  62 +sendFabricCommand $dev 4 "+str(len(i))+"\n"
  63 + command+="\n".join(["sendFabricCommand $dev 4 "+str(j) for j in i])+"\n"
  64 + command+="sendFabricCommand $dev 4 6\n\
  65 +lappend parities [expr [lindex [sendFabricCommand $dev 4 0] 1] & 1]\n"
  66 + command+="return $parities"
  67 + return command
64 68  
65 69 def disconnect(self):
66 70 command = "disconnect $dev\n"
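command_get_parities_from_indices now emits a single Tcl script that loops over all blocks and collects one parity per block with lappend, rather than returning a single value. A hedged usage sketch (run from the repository root; no board is needed just to build the string):

    from boards_management import board_commands

    board_manager = board_commands.Board_manager()
    # Build, but do not evaluate, the script for two 2-bit blocks (arbitrary indices)
    script = board_manager.command_get_parities_from_indices([[1, 5], [7, 2]])
    print(script)
    # One sendFabricCommand sequence per block, ending with "return $parities";
    # evaluating it through tclsh yields a string such as "0 1".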
boards_management/get_response.py View file @ 757796e
... ... @@ -3,33 +3,22 @@
3 3 # 42000 Saint-Etienne - France
4 4 # Contact: b.colombier@univ-st-etienne.fr
5 5 # Project: Demonstrator
6   -# File: get_parities_from_indices.py
7   -# Date : 2016-10-12
  6 +# File: get_response.py
  7 +# Date : 2016-11-03
8 8  
9 9 import Tkinter
10 10  
  11 +import board_commands
  12 +
11 13 def get_response():
12 14  
13   - """Get the parities of the on-board response blocks.
  15 + """Get the response from the board"""
14 16  
15   - Hardware-specific implementation.
16   - >>> get_parities_from_indices([[1, 5], [7, 2], [3, 6], [4, 0]])
17   - [0, 1, 1, 0]
18   - """
19   -
20 17 tclsh = Tkinter.Tcl()
21   - parities = []
22   - tclsh.eval("source {HECTOR_data_acq.tcl}")
23   - tclsh.eval("set dev [openDevice COM5]")
24   - tclsh.eval("puts \"Resetting the board\"")
25   - tclsh.eval("softReset $dev")
26   - tclsh.eval("sendDaughterReset $dev")
27   - tclsh.eval("sendFabricReset $dev")
28   - tclsh.eval("sendFabricCommand $dev 4 7") # generate response from LFSR
29   - tclsh.eval("sendFabricCommand $dev 4 3") # offload response to MB
30   - tclsh.eval("set rep \"[sendFabricCommand $dev 5 3] [sendFabricCommand $dev 5 2] [sendFabricCommand $dev 5 1] [sendFabricCommand $dev 5 0]\"") # Display response block 0
31   - tclsh.eval("regsub -all { } $rep {} rep")
32   - response = tclsh.eval("return $rep")
  18 + board_manager = board_commands.Board_manager()
  19 + tclsh.eval(board_manager.source_tcl_package())
  20 + tclsh.eval(board_manager.connect("COM5"))
  21 + response = tclsh.eval(board_manager.generate_and_offload_response())
33 22 return response
34 23  
35 24  
parsers/build_verilog_df.py View file @ 757796e
  1 +# -*- coding: utf-8 -*-
  2 +"""
  3 +Author : Brice Colombier
  4 +Affiliation: Laboratoire Hubert Curien, UMR CNRS 5516
  5 + University of Lyon
  6 + 18 rue du Professeur Benoit Lauras
  7 + 42000 Saint-Etienne - France
  8 +Contact : b.colombier@univ-st-etienne.fr
  9 +
  10 +Title : Building the graph from the netlist
  11 +Project : Graph-based nodes selection for functional locking
  12 +
  13 +File : build_verilog_df.py
  14 +Last update: 2015-03-17
  15 +"""
  16 +from __future__ import print_function
  17 +import igraph as ig
  18 +import pyparsing as pp
  19 +import time
  20 +import clean_const
  21 +import sys
  22 +import os
  23 +
  24 +def adv_add_edges(g, es, instances, **kwds):
  25 + """adv_add_edges(es, **kwds)
  26 +
  27 + Adds multiple edges to the graph with a unique set of keywords.
  28 +
  29 + Keyword arguments (except the source and target arguments) will be
  30 + assigned to added edges as attributes.
  31 + @param g : the graph where to add given edges
  32 + @param es: list of source - dest tuples to add
  33 + @param instances: list of instances names to add corresponding to edges
  34 + @param **kwds : attributes to add to all the edges
  35 +
  36 + @return result of igraph.add_edges()
  37 + """
  38 +
  39 + if len(es) != len(instances):
  40 + raise Exception('Length of es different than length of instances')
  41 + if not kwds:
  42 + return g.add_edges(es)
  43 +# Getting next edge ID
  44 + eid = g.ecount()
  45 +# Adding all the edges from es to the graph
  46 + result = g.add_edges(es)
  47 +# Adding Keywords and instance name to all the edges
  48 + for i, _ in enumerate(es):
  49 + for key, value in kwds.iteritems():
  50 + g.es[eid + i][key] = value
  51 + g.es[eid + i]['instance'] = instances[i]
  52 + return result
  53 +
  54 +def priorityFind(inputs_str):
  55 + """priorityFind(inputs_str)
  56 +
  57 + Convert an inputs combination string with priorities marked by parentheses
  58 +
  59 + @param inputs_str: inputs combination string
  60 +
  61 + @return an array of string corresponding to priority blocks sorted by level (0 is the lowest level)
  62 + """
  63 +# Initializing some vars
  64 + prio_level = 0 #Save the current priority level
  65 + temp_storage = ['']#Save the blocks in progress at lower priority levels
  66 + bloc_count = [0] #Counters of blocks at each level, used to generate references between levels
  67 + result = [''] #The result which will be returned
  68 +# reading all characters one by one from the logical equation
  69 + for idl, letter in enumerate(inputs_str):
  70 +# Beginning of a new block of a higher level
  71 + if letter == '(':
  72 +# Adding a new temporary block
  73 + if temp_storage[prio_level][-1] == '~':
  74 + temp_storage.append('#')
  75 + temp_storage[prio_level] = temp_storage[prio_level][:-3]
  76 + else:
  77 + temp_storage.append('')
  78 +# Adding a counter to bloc_count if it doesn't exist yet
  79 + if len(bloc_count) == prio_level + 1:
  80 + bloc_count.append(0)
  81 + result.append([])
  82 +# Otherwise, increasing the counter
  83 + else:
  84 + bloc_count[prio_level + 1] += 1
  85 +# Adding the reference to the block in progress and increasing the priority level
  86 + temp_storage[prio_level] += '{' + str(bloc_count[prio_level + 1]) + '}'
  87 + prio_level += 1
  88 +# Ending of a priority block
  89 + elif letter == ')':
  90 +# Transferring the processed block to the result and decreasing the priority level
  91 + result[prio_level].append(temp_storage.pop())
  92 + prio_level -= 1
  93 +# In any other case, copying the letter to the block being processed
  94 + else:
  95 + temp_storage[prio_level] += letter
  96 +
  97 +# Copying the lowest-level block into the results
  98 + result[0] = [temp_storage[0]]
  99 +# Splitting block strings to make later processing easier
  100 + inputs_array = []
  101 + for i, level in enumerate(result):
  102 + inputs_array.append([])
  103 + for j, block in enumerate(level):
  104 + if block[0] == '#':
  105 + block = block[1:]
  106 + complemented = True
  107 + else:
  108 + complemented = False
  109 + inputs_array[i].append([])
  110 + for k, or_branch in enumerate(block.split(' | ')):
  111 + if ' & ' in or_branch:
  112 + operator = ' & '
  113 + inputs_array[i][j].append(['and'])
  114 + elif ' ^ ' in or_branch:
  115 + operator = ' ^ '
  116 + inputs_array[i][j].append(['xor'])
  117 + elif ' ~^ ' in or_branch:
  118 + operator = ' ~^ '
  119 + inputs_array[i][j].append(['xnor'])
  120 + elif ' ^~ ' in or_branch:
  121 + operator = ' ^~ '
  122 + inputs_array[i][j].append(['xnor'])
  123 + else:
  124 + operator = 'none'
  125 + inputs_array[i][j].append(['none'])
  126 + inputs_array[i][j][k].extend(filter(None, or_branch.split(operator)))
  127 + if complemented:
  128 + inputs_array[i][j][k].append('#')
  129 +
  130 + return inputs_array
  131 +
  132 +def AddIO(toks, g, prim_in, prim_out, nodes):
  133 + """AddIO(toks)
  134 +
  135 + Add Inputs and Outputs to the graph
  136 +
  137 + @param toks: pyparsing result for the edif syntax
  138 + toks[0] : external libraries list
  139 + toks[1] : internal cells list
  140 + toks[2] : described cell's name
  141 + @param g: igraph instance to complete with inputs and outputs
  142 + @param prim_in: list containing inputs names
  143 + @param prim_out: list containing outputs names
  144 + @param nodes: list to complete with signal which can be inputs of functions
  145 + @param top_cell: ID of the top cell in the results list
  146 + """
  147 + print("AddIO") #Debug
  148 +# Reading all the IO bus of the global cell
  149 + for IODecl in toks[0]:
  150 + InputOutput = IODecl[1]
  151 + Dir = IODecl[0]
  152 +# Adding ports
  153 + if Dir == "input":
  154 + if not InputOutput in prim_in:
  155 + g.add_vertex(InputOutput,
  156 + label=InputOutput,
  157 + color="#DDDDDD",
  158 + cat="input",
  159 + locks=[],
  160 + forced=[],
  161 + size=100,
  162 + label_size=30)
  163 + prim_in.append(InputOutput)
  164 + nodes.append(InputOutput)
  165 +
  166 + elif Dir == "output":
  167 + if (not InputOutput in prim_in) and (not InputOutput in prim_out):
  168 + prim_out.append(InputOutput)
  169 + g.add_vertex(InputOutput,
  170 + label=InputOutput,
  171 + color="#666666",
  172 + cat="output",
  173 + locks=[0, 1],#By convention
  174 + forced=[],
  175 + size=100,
  176 + label_size=30)
  177 +
  178 +def AddGate(toks, g, prim_in, prim_out, nodes, name_conv):
  179 + """AddGate(toks)
  180 +
  181 + Add edges to the graph
  182 +
  183 + @param toks: pyparsing result for the edif syntax
  184 + toks[0] : Inputs list
  185 + toks[1] : Outputs list
  186 + toks[2] : Nodes' link list
  187 + toks[3] : logical functions list
  188 + @param g: igraph instance to complete with inputs and outputs
  189 + @param prim_in: list containing inputs names
  190 + @param prim_out: list containing outputs names
  191 + @param nodes: list to fill with signal which can be inputs of functions
  192 + @param name_conv: dictionary mapping node names to the names that should replace them
  193 + """
  194 + print("AddGates") #Debug
  195 +# Initializing some vars
  196 + to_add_edges = {'and':[[],[]], 'nand':[[],[]], 'or':[[],[]], 'nor':[[],[]], 'not':[[],[]], 'buf':[[],[]], 'xor':[[],[]], 'xnor':[[],[]]}
  197 + complement = {'and':'nand', 'nand':'and', 'or':'nor', 'nor':'or', 'xor':'xnor', 'xnor':'xor'}
  198 + add_vertices = [] #list of vertices to add
  199 + instance_counter = 0#counter to generate instances names
  200 +
  201 + add_vertices.extend(toks[1])
  202 +
  203 +# Initializing progress display
  204 + nb = 0
  205 + disp_time = time.time()
  206 + nbdisp = '0'
  207 + print('-> ' + str(len(toks[2])) + '/' + nbdisp, end='')
  208 +# Reading all the logical functions
  209 + for Def in toks[2]:
  210 +# Updating progress display every 0.5s
  211 + nb += 1
  212 + if time.time() >= disp_time + 0.5:
  213 + disp_time += 0.5
  214 + for _ in nbdisp:
  215 + print('\b', end='')
  216 + nbdisp = str(nb)
  217 + print(nbdisp, end='')
  218 +
  219 + output = Def[0]
  220 +# Getting inputs array to process
  221 + inputs_array = priorityFind(Def[1].replace('\n', ''))
  222 +# Reading all the priority level from the array (from the higher to the lower)
  223 + for i, level in reversed(list(enumerate(inputs_array))):
  224 + if (i == 0) and (len(level) == 1) and (len(level[0]) == 1) and (len(level[0][0]) == 2) and (level[0][0][1][0] == '{'):
  225 + continue
  226 +# Reading all the blocks from the priority level
  227 + for j, block in enumerate(level):
  228 +# Checking if a global or is needed
  229 + if len(block) > 1:
  230 + global_or = True
  231 + else:
  232 + global_or = False
  233 +# Generating an internal name for blocks, except for the last one to be built
  234 + if (i > 1) or ((i == 1) and (len(inputs_array[0]) > 1)):
  235 + block_out = "Or{0}{1}_{2}".format(i, j, output)
  236 + add_vertices.append(block_out)
  237 + else:
  238 + block_out = output
  239 +# initializing an array which will contain the AND-gate output nodes to link
  240 + block_ands = []
  241 +# setting the complementation state
  242 + complemented = False
  243 +# Reading and gates in the block
  244 + for k, and_ins in enumerate(block):
  245 + if and_ins == '#':
  246 + if complemented:
  247 + complemented = False
  248 + else:
  249 + complemented = True
  250 + continue
  251 +# If a global OR is needed, generating the internal name for the AND output
  252 + if global_or:
  253 + inner_name = "And{0}{1}{2}_{3}".format(i, j, k, output)
  254 + add_vertices.append(inner_name)
  255 + block_ands.append(inner_name)
  256 +# Else, using block output as and output
  257 + else:
  258 + inner_name = block_out
  259 +# Initializing an array which will contain edges (source, target)
  260 + edges_list = []
  261 +# Reading all inputs for the and function
  262 + for signal in and_ins:
  263 +# If the input is '#', changing the complementation state of the block
  264 + if signal == '#':
  265 + if complemented:
  266 + complemented = False
  267 + else:
  268 + complemented = True
  269 + elif signal in ['and', 'nand', 'nor', 'xor', 'xnor', 'none']:
  270 + function = signal
  271 + elif signal[0] == '{':
  272 + edges_list.append(("Or{0}{1}_{2}".format(i+1, signal[1:-1], output),inner_name))
  273 +# If the signal ends with ('), using a complemented version of the input
  274 + else:
  275 + edges_list.append((signal, inner_name))
  276 +# If more than 1 inputs
  277 + if len(edges_list) > 1:
  278 +# Using a NAND only if there is no global OR and the block is complemented
  279 + if (not global_or) and complemented:
  280 + to_add_edges[complement[function]][0].extend(edges_list)
  281 + to_add_edges[complement[function]][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
  282 + instance_counter += 1
  283 +# Else, using an AND
  284 + else:
  285 + to_add_edges[function][0].extend(edges_list)
  286 + to_add_edges[function][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
  287 + instance_counter += 1
  288 +# If only 1 input, no need to use an AND or a NAND
  289 + else:
  290 +
  291 + if not global_or and complemented:
  292 + to_add_edges['not'][0].append(edges_list[0])
  293 + to_add_edges['not'][1].append('U' + str(instance_counter))
  294 + instance_counter += 1
  295 + elif not global_or and not complemented:
  296 + to_add_edges['buf'][0].append(edges_list[0])
  297 + to_add_edges['buf'][1].append('U' + str(instance_counter))
  298 + instance_counter += 1
  299 + else:
  300 + block_ands.append(edges_list[0][0])
  301 +# If a global or is needed
  302 + if global_or:
  303 +# building all the edges between block_ands and block_out
  304 + edges_list = []
  305 + for and_signal in block_ands:
  306 + edges_list.append((and_signal, block_out))
  307 +# Adding edges to to_add_edges with the right function depending on complemented
  308 + if complemented:
  309 + to_add_edges['nor'][0].extend(edges_list)
  310 + to_add_edges['nor'][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
  311 + instance_counter += 1
  312 + else:
  313 + to_add_edges['or'][0].extend(edges_list)
  314 + to_add_edges['or'][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
  315 + instance_counter += 1
  316 +# displaying the last progress number
  317 + for _ in nbdisp:
  318 + print('\b', end='')
  319 + print(str(nb))
  320 +# Adding all vertices to the graph
  321 + for vertex in add_vertices:
  322 + if (not vertex in prim_in) and (not vertex in prim_out):
  323 + g.add_vertex(vertex,
  324 + label=vertex,
  325 + color="#FFFFFF",
  326 + cat="node",
  327 + locks=[],
  328 + forced=[],
  329 + size=100,
  330 + label_size=30)
  331 +# Adding all edges to the graph ordered by function
  332 + for i in sorted(to_add_edges, key=lambda i: len(to_add_edges[i][0]), reverse=False):
  333 + if len(to_add_edges[i][0]) > 0:
  334 + print('-> Adding ' + str(len(to_add_edges[i][0])) + ' ' + i + '...')
  335 + adv_add_edges(g, to_add_edges[i][0], to_add_edges[i][1],
  336 + name=i,
  337 + label=i,
  338 + width=5,
  339 + arrow_size=2,
  340 + label_size=40,
  341 + color="#AAAAAA")
  342 +
  343 +def build(netlist, result=None, parsedbg=False, verbose=True):
  344 + """build(name, result=None, parsedbg=False)
  345 + Builds the graph from a Verilog dataflow netlist
  346 + @param netlist: name of the netlist to build
  347 + @keyword result(None): result set from the edif syntax reader. Avoid reading and use set given instead. Used mainly for debugging.
  348 + @keyword parsedbg(False): Boolean used to disable graph generation and return the syntax reader set of the chosen netlist. Used mainly for debugging.
  349 +
  350 + @return a tuple of 4 elements : the graph for the netlist, the list of inputs, the list of outputs, the list of node (inputs and internal nodes)
  351 + """
  352 + print("---- Build Verilog RTL")
  353 + if not verbose:
  354 + sys.stdout = open(os.devnull, 'w')
  355 + g = ig.Graph(directed=1)
  356 + prim_in = [] #primary inputs of the netlist
  357 + prim_out = [] #primary outputs of the netlist
  358 + nodes = [] #nodes of the netlist
  359 + name_conv = {}
  360 +# Setting basic elements
  361 + name = pp.Word(pp.alphanums + '_\[')
  362 + port = pp.Group(pp.Or(['input', 'output']) + name + pp.Suppress(pp.Literal(';')))
  363 +
  364 + signals = pp.Group(pp.OneOrMore(pp.Suppress(pp.CaselessLiteral('wire')) + name + pp.Suppress(pp.Literal(';'))))
  365 +
  366 + content = pp.Group(pp.OneOrMore(pp.Group(pp.Suppress(pp.Literal('assign')) + name + pp.Suppress(pp.Literal('=')) + pp.SkipTo(pp.Literal(';')) + pp.Suppress(pp.Literal(';')))))
  367 + module = pp.Suppress(pp.CaselessLiteral('module') + name + pp.CaselessLiteral('(') + pp.OneOrMore(name + pp.Literal(',')) + name + pp.Literal(');')) + pp.Group(pp.OneOrMore(port))
  368 + architecture = signals + content + pp.Suppress(pp.CaselessLiteral('endmodule'))
  369 +
  370 + FullParser = pp.Suppress(pp.SkipTo(module)) + module + architecture
  371 +# Opening netlist file
  372 + source_file = netlist
  373 +# Reading the file if no results given
  374 + if result == None:
  375 + print("Parsing...")
  376 + result = FullParser.parseFile(source_file)
  377 + print("---- Done\n")
  378 +# Returning only the result from the file if asked by user
  379 + if parsedbg:
  380 + if not verbose:
  381 + sys.stdout = sys.__stdout__
  382 + print('---- Done (parsedbg)\n')
  383 + return result
  384 +# Building and returning the graph
  385 + else:
  386 + AddIO(result, g, prim_in, prim_out, nodes)
  387 + AddGate(result, g, prim_in, prim_out, nodes, name_conv)
  388 + print("---- Done\n")
  389 + g = clean_const.clean_not_buf(g)
  390 + if not verbose:
  391 + sys.stdout = sys.__stdout__
  392 + print('---- Done\n')
  393 +
  394 + return g, prim_in, prim_out, nodes
  395 +
  396 +if __name__ == "__main__":
  397 + build("./addsub_df.v")
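The intermediate structure produced by priorityFind is easiest to see on a tiny expression: each parenthesised sub-expression becomes a block one level up and is referenced from below as "{n}", and every OR branch is prefixed with its operator. A small usage sketch (requires the repository's igraph and pyparsing dependencies so that the module imports):

    from parsers.build_verilog_df import priorityFind

    print(priorityFind("a & (b | c)"))
    # e.g. [[[['and', 'a', '{0}']]], [[['none', 'b'], ['none', 'c']]]]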
parsers/build_verilog_rtl.py View file @ 757796e
1   -# -*- coding: utf-8 -*-
2   -"""
3   -Author : Brice Colombier
4   -Affiliation: Laboratoire Hubert Curien, UMR CNRS 5516
5   - University of Lyon
6   - 18 rue du Professeur Benoit Lauras
7   - 42000 Saint-Etienne - France
8   -Contact : b.colombier@univ-st-etienne.fr
9   -
10   -Title : Building the graph from the netlist
11   -Project : Graph-based nodes selection for functional locking
12   -
13   -File : build_def.py
14   -Last update: 2015-03-17
15   -"""
16   -from __future__ import print_function
17   -import igraph as ig
18   -import pyparsing as pp
19   -import time
20   -import clean_const
21   -import sys
22   -import os
23   -
24   -def adv_add_edges(g, es, instances, **kwds):
25   - """adv_add_edges(es, **kwds)
26   -
27   - Adds multiple edges to the graph with a unique set of keywords.
28   -
29   - Keyword arguments (except the source and target arguments) will be
30   - assigned to added edges as attributes.
31   - @param g : the graph where to add given edges
32   - @param es: list of source - dest tuples to add
33   - @param instances: list of instances names to add corresponding to edges
34   - @param **kwds : attributes to add to all the edges
35   -
36   - @return result of igraph.add_edges()
37   - """
38   -
39   - if len(es) != len(instances):
40   - raise Exception('Length of es different than length of instances')
41   - if not kwds:
42   - return g.add_edges(es)
43   -# Getting next edge ID
44   - eid = g.ecount()
45   -# Adding all the edges from es to the graphe
46   - result = g.add_edges(es)
47   -# Adding Keywords and instance name to all the edges
48   - for i, _ in enumerate(es):
49   - for key, value in kwds.iteritems():
50   - g.es[eid + i][key] = value
51   - g.es[eid + i]['instance'] = instances[i]
52   - return result
53   -
54   -def priorityFind(inputs_str):
55   - """priorityFind(inputs_str)
56   -
57   - Convert an inputs combination string with priority between parenthesis
58   -
59   - @param inputs_str: inputs combination string
60   -
61   - @return an array of string corresponding to priority blocks sorted by level (0 is the lowest level)
62   - """
63   -# Initializing some vars
64   - prio_level = 0 #Save the current priority level
65   - temp_storage = ['']#Save the blocs in progress of lower priority levels
66   - bloc_count = [0] #Counters of blocs of each level to generate reference between levels
67   - result = [''] #The result which will be returned
68   -# reading all characters one by one from the logical equation
69   - for idl, letter in enumerate(inputs_str):
70   -# Beginning of a new bloc of higher level
71   - if letter == '(':
72   -# Adding a new temporary bloc
73   - if temp_storage[prio_level][-1] == '~':
74   - temp_storage.append('#')
75   - temp_storage[prio_level] = temp_storage[prio_level][:-3]
76   - else:
77   - temp_storage.append('')
78   -# Adding a counter to bloc_count if it doesn't exist yet
79   - if len(bloc_count) == prio_level + 1:
80   - bloc_count.append(0)
81   - result.append([])
82   -# Increasing the value else
83   - else:
84   - bloc_count[prio_level + 1] += 1
85   -# Adding the reference in the bloc in progress and increasing priority level
86   - temp_storage[prio_level] += '{' + str(bloc_count[prio_level + 1]) + '}'
87   - prio_level += 1
88   -# Ending of a priority bloc
89   - elif letter == ')':
90   -# Transfering prcessed bloc to the result and decreasing priority level
91   - result[prio_level].append(temp_storage.pop())
92   - prio_level -= 1
93   -# In any other case, copying letter to the processing bloc
94   - else:
95   - temp_storage[prio_level] += letter
96   -
97   -# Copying the lower level bloc in the results
98   - result[0] = [temp_storage[0]]
99   -# Splitting blocs strings to make processing easier after
100   - inputs_array = []
101   - for i, level in enumerate(result):
102   - inputs_array.append([])
103   - for j, block in enumerate(level):
104   - if block[0] == '#':
105   - block = block[1:]
106   - complemented = True
107   - else:
108   - complemented = False
109   - inputs_array[i].append([])
110   - for k, or_branch in enumerate(block.split(' | ')):
111   - if ' & ' in or_branch:
112   - operator = ' & '
113   - inputs_array[i][j].append(['and'])
114   - elif ' ^ ' in or_branch:
115   - operator = ' ^ '
116   - inputs_array[i][j].append(['xor'])
117   - elif ' ~^ ' in or_branch:
118   - operator = ' ~^ '
119   - inputs_array[i][j].append(['xnor'])
120   - elif ' ^~ ' in or_branch:
121   - operator = ' ^~ '
122   - inputs_array[i][j].append(['xnor'])
123   - else:
124   - operator = 'none'
125   - inputs_array[i][j].append(['none'])
126   - inputs_array[i][j][k].extend(filter(None, or_branch.split(operator)))
127   - if complemented:
128   - inputs_array[i][j][k].append('#')
129   -
130   - return inputs_array
131   -
132   -def AddIO(toks, g, prim_in, prim_out, nodes):
133   - """AddIO(toks)
134   -
135   - Add Inputs and Outputs to the graph
136   -
137   - @param toks: pyparsing result for the edif syntax
138   - toks[0] : external libraries list
139   - toks[1] : internal cells list
140   - toks[2] : described cell's name
141   - @param g: igraph instance to complete with inputs and outputs
142   - @param prim_in: list containing inputs names
143   - @param prim_out: list containing outputs names
144   - @param nodes: list to complete with signal which can be inputs of functions
145   - @param top_cell: ID of the top cell in the results list
146   - """
147   - print("AddIO") #Debug
148   -# Reading all the IO bus of the global cell
149   - for IODecl in toks[0]:
150   - InputOutput = IODecl[1]
151   - Dir = IODecl[0]
152   -# Adding ports
153   - if Dir == "input":
154   - if not InputOutput in prim_in:
155   - g.add_vertex(InputOutput,
156   - label=InputOutput,
157   - color="#DDDDDD",
158   - cat="input",
159   - locks=[],
160   - forced=[],
161   - size=100,
162   - label_size=30)
163   - prim_in.append(InputOutput)
164   - nodes.append(InputOutput)
165   -
166   - elif Dir == "output":
167   - if (not InputOutput in prim_in) and (not InputOutput in prim_out):
168   - prim_out.append(InputOutput)
169   - g.add_vertex(InputOutput,
170   - label=InputOutput,
171   - color="#666666",
172   - cat="output",
173   - locks=[0, 1],#By convention
174   - forced=[],
175   - size=100,
176   - label_size=30)
177   -
178   -def AddGate(toks, g, prim_in, prim_out, nodes, name_conv):
179   - """AddGate(toks)
180   -
181   - Add edges to the graph
182   -
183   - @param toks: pyparsing result for the edif syntax
184   - toks[0] : Inputs list
185   - toks[1] : Outputs list
186   - toks[2] : Nodes' link list
187   - toks[3] : logical functions list
188   - @param g: igraph instance to complete with inputs and outputs
189   - @param prim_in: list containing inputs names
190   - @param prim_out: list containing outputs names
191   - @param nodes: list to fill with signal which can be inputs of functions
192   - @param name_conv: dictionnary containing nodes name to be replace with an other name
193   - """
194   - print("AddGates") #Debug
195   -# Initializing some vars
196   - to_add_edges = {'and':[[],[]], 'nand':[[],[]], 'or':[[],[]], 'nor':[[],[]], 'not':[[],[]], 'buf':[[],[]], 'xor':[[],[]], 'xnor':[[],[]]}
197   - complement = {'and':'nand', 'nand':'and', 'or':'nor', 'nor':'or', 'xor':'xnor', 'xnor':'xor'}
198   - add_vertices = [] #list of vertices to add
199   - instance_counter = 0#counter to generate instances names
200   -
201   - add_vertices.extend(toks[1])
202   -
203   -# Initializing progress display
204   - nb = 0
205   - disp_time = time.time()
206   - nbdisp = '0'
207   - print('-> ' + str(len(toks[2])) + '/' + nbdisp, end='')
208   -# Reading all the logical functions
209   - for Def in toks[2]:
210   -# Updating progress display every 0.5s
211   - nb += 1
212   - if time.time() >= disp_time + 0.5:
213   - disp_time += 0.5
214   - for _ in nbdisp:
215   - print('\b', end='')
216   - nbdisp = str(nb)
217   - print(nbdisp, end='')
218   -
219   - output = Def[0]
220   -# Getting inputs array to process
221   - inputs_array = priorityFind(Def[1].replace('\n', ''))
222   -# Reading all the priority level from the array (from the higher to the lower)
223   - for i, level in reversed(list(enumerate(inputs_array))):
224   - if (i == 0) and (len(level) == 1) and (len(level[0]) == 1) and (len(level[0][0]) == 2) and (level[0][0][1][0] == '{'):
225   - continue
226   -# Reading all the blocks from the priority level
227   - for j, block in enumerate(level):
228   -# Checking if a global or is needed
229   - if len(block) > 1:
230   - global_or = True
231   - else:
232   - global_or = False
233   -# Generating an internal name for blocs except for the last one to built
234   - if (i > 1) or ((i == 1) and (len(inputs_array[0]) > 1)):
235   - block_out = "Or{0}{1}_{2}".format(i, j, output)
236   - add_vertices.append(block_out)
237   - else:
238   - block_out = output
239   -# initializing an array which will contain and output nodes to link
240   - block_ands = []
241   -# setting the complementation state
242   - complemented = False
243   -# Reading and gates in the block
244   - for k, and_ins in enumerate(block):
245   - if and_ins == '#':
246   - if complemented:
247   - complemented = False
248   - else:
249   - complemented = True
250   - continue
251   -# If a global or is needed, generating the internal name for the and output
252   - if global_or:
253   - inner_name = "And{0}{1}{2}_{3}".format(i, j, k, output)
254   - add_vertices.append(inner_name)
255   - block_ands.append(inner_name)
256   -# Else, using block output as and output
257   - else:
258   - inner_name = block_out
259   -# Initializing an array which will contain edges (source, target)
260   - edges_list = []
261   -# Reading all inputs for the and function
262   - for signal in and_ins:
263   -# If the input is '#', changing the complementation state of the block
264   - if signal == '#':
265   - if complemented:
266   - complemented = False
267   - else:
268   - complemented = True
269   - elif signal in ['and', 'nand', 'nor', 'xor', 'xnor', 'none']:
270   - function = signal
271   - elif signal[0] == '{':
272   - edges_list.append(("Or{0}{1}_{2}".format(i+1, signal[1:-1], output),inner_name))
273   -# If the signal end with ('), using a complemented version of the input
274   - else:
275   - edges_list.append((signal, inner_name))
276   -# If more than 1 inputs
277   - if len(edges_list) > 1:
278   -# Using a NAND only if ther is no global or and the bloc is complemented
279   - if (not global_or) and complemented:
280   - to_add_edges[complement[function]][0].extend(edges_list)
281   - to_add_edges[complement[function]][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
282   - instance_counter += 1
283   -# Else, using an AND
284   - else:
285   - to_add_edges[function][0].extend(edges_list)
286   - to_add_edges[function][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
287   - instance_counter += 1
288   -# If only 1 input, no need to use an AND or a NAND
289   - else:
290   -
291   - if not global_or and complemented:
292   - to_add_edges['not'][0].append(edges_list[0])
293   - to_add_edges['not'][1].append('U' + str(instance_counter))
294   - instance_counter += 1
295   - elif not global_or and not complemented:
296   - to_add_edges['buf'][0].append(edges_list[0])
297   - to_add_edges['buf'][1].append('U' + str(instance_counter))
298   - instance_counter += 1
299   - else:
300   - block_ands.append(edges_list[0][0])
301   -# If a global or is needed
302   - if global_or:
303   -# building all the edge beteen block_ands and block_out
304   - edges_list = []
305   - for and_signal in block_ands:
306   - edges_list.append((and_signal, block_out))
307   -# Adding edges to to_add_edges with the right function depending on complmented
308   - if complemented:
309   - to_add_edges['nor'][0].extend(edges_list)
310   - to_add_edges['nor'][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
311   - instance_counter += 1
312   - else:
313   - to_add_edges['or'][0].extend(edges_list)
314   - to_add_edges['or'][1].extend(['U' + str(instance_counter) for x in range(len(edges_list))])
315   - instance_counter += 1
316   -# displaying the last progress number
317   - for _ in nbdisp:
318   - print('\b', end='')
319   - print(str(nb))
320   -# Adding all vertices to the graphn
321   - for vertex in add_vertices:
322   - if (not vertex in prim_in) and (not vertex in prim_out):
323   - g.add_vertex(vertex,
324   - label=vertex,
325   - color="#FFFFFF",
326   - cat="node",
327   - locks=[],
328   - forced=[],
329   - size=100,
330   - label_size=30)
331   -# Adding all edges to the