Skip to content

Commit

Permalink
Bug fixes and improvements:
Browse files Browse the repository at this point in the history
- Fixed an issue that caused a wrong return value when specifying return_decompressed_model in decompress_model
- compress_model now also accepts arrays of type uint8, uint16, int8, int16 and float16 (but internally converts them into int32 or float32)
- improved guessing of block_id and parameter_types for TensorFlow and PyTorch
- NNCodec now allows specifying only a subset of the tensors for block_id_and_param_type
  • Loading branch information
phaase-hhi committed Apr 17, 2023
1 parent 8de1716 commit 5b91f18
Show file tree
Hide file tree
Showing 7 changed files with 218 additions and 105 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ source env/bin/activate

**Note**: For further information on how to set up a virtual python environment (also on **Windows**) refer to https://docs.python.org/3/library/venv.html .

When successfully installed, the software outputs the line : "Successfully installed NNC-0.1.8"
When successfully installed, the software outputs the line : "Successfully installed NNC-0.2.0"

### Importing the main module

Expand Down
99 changes: 68 additions & 31 deletions framework/pytorch_model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,17 +291,28 @@ def init_model_from_dict(self, pt_dict):
model_dict = pt_dict.state_dict()

model_data = {'parameters': {}, 'reduction_method': 'baseline'}
model_info = {'parameter_type': {}, 'parameter_dimensions': {}, 'parameter_index': {}, 'block_identifier': {},
model_info = {'parameter_type': {}, 'parameter_dimensions': {}, 'parameter_index': {}, 'block_identifier': {}, 'original_size': {},
'topology_storage_format': nnc_core.nnr_model.TopologyStorageFormat.NNR_TPL_PYT,
'topology_compression_format': nnc_core.nnr_model.TopologyCompressionFormat.NNR_PT_RAW}

# metadata only needed for MNASNet from PYT model zoo... further work: include into bitstream
# self.metadata = getattr(model_dict, '_metadata', None)

type_list_int = ['int8', 'int16', 'int32', 'uint8', 'uint16', 'uint32']
type_list_1_bytes = ['int8', 'uint8']
type_list_2_bytes = ['int16', 'uint16', 'float16']
original_size = 0

for i, module_name in enumerate(model_dict):
if '.num_batches_tracked' in module_name:
continue
model_data['parameters'][module_name] = model_dict[module_name].data.cpu().detach().numpy()
if model_dict[module_name].data.cpu().detach().numpy().dtype in type_list_1_bytes:
original_size += model_dict[module_name].numel()
elif model_dict[module_name].data.cpu().detach().numpy().dtype in type_list_2_bytes:
original_size += model_dict[module_name].numel()*2
else:
original_size += model_dict[module_name].numel()*4
model_data['parameters'][module_name] = np.int32(model_dict[module_name].data.cpu().detach().numpy()) if model_dict[module_name].data.cpu().detach().numpy().dtype in type_list_int else model_dict[module_name].data.cpu().detach().numpy()
if '.weight_scaling' in module_name:
model_data['parameters'][module_name] = model_data['parameters'][module_name].flatten()
mdl_shape = model_data['parameters'][module_name].shape
Expand All @@ -326,13 +337,16 @@ def init_model_from_dict(self, pt_dict):
model_info['parameter_type'][module_name] = 'bn.var'
elif '.weight_scaling' in module_name:
model_info['parameter_type'][module_name] = 'weight.ls'
elif '.weight' in module_name:
elif 'gamma' in module_name:
model_info['parameter_type'][module_name] = 'bn.gamma'
elif '.weight' in module_name:
model_info['parameter_type'][module_name] = "weight"
else:
model_info['parameter_type'][module_name] = 'unspecified'
else:
model_info['parameter_type'][module_name] = 'unspecified'

model_info["original_size"] = original_size

self.__model_info = model_info

Expand All @@ -354,31 +368,37 @@ def guess_block_id_and_param_type(self, model_parameters):
blkNum = -1
for param in model_parameters.keys():
dims = len(model_parameters[param].shape)
paramShape = model_parameters[param].shape
splitted_param = param.split(".")
param_end = splitted_param[-1]
base_block_id = ".".join(splitted_param[0:-2]+[""]) if len(splitted_param[0:-2]) != 0 else "genericBlk."

base_block_id = ".".join(splitted_param[0:-1]+[""]) if len(splitted_param[0:-1]) != 0 else "genericBlk."


if dims > 1 and ('kernel' in param_end or 'weight' in param_end):
paramType = 'weight'
blockId = base_block_id #block_id
blockId = base_block_id
elif dims > 1:
paramType = 'weight'
blockId = base_block_id
elif dims == 1:
if 'bias' in param_end or 'beta' in param_end: ##could also be bn.beta
if 'bias' in param_end or 'beta' in param_end:
paramType = 'bias'
blockId = base_block_id #block_id
blockId = base_block_id
elif 'running_mean' in param_end or 'moving_mean' in param_end:
paramType = 'bn.mean'
blockId = base_block_id #block_id
blockId = base_block_id
elif 'running_var' in param_end or 'moving_variance' in param_end:
paramType = 'bn.var'
blockId = base_block_id #block_id
blockId = base_block_id
elif 'weight_scaling' in param_end:
paramType = 'weight.ls'
blockId = base_block_id
elif 'weight' in param_end or 'gamma' in param_end:
elif 'gamma' in param_end:
paramType = 'bn.gamma'
blockId = base_block_id #block_id
blockId = base_block_id
elif 'weight' in param_end:
paramType = 'weight'
blockId = base_block_id
else:
paramType = 'unspecified'
blockId = None
Expand All @@ -401,29 +421,46 @@ def guess_block_id_and_param_type(self, model_parameters):
if block_id not in block_dict.keys():
block_dict[block_id] = []

block_dict[block_id].append( [param, paramType, blockId] )
block_dict[block_id].append( [param, paramType, blockId, dims, paramShape] )
else:
block_id_and_param_type["parameter_type"][param] = paramType
block_id_and_param_type["block_identifier"][param] = blockId

weight_block_list = []
bn_block_list = []

lastBlkId = None
for block_list in block_dict.values():
lsa_enabled = any('weight.ls' in l for l in block_list)
blk_len = 6 if lsa_enabled else 5
hasWeightParam = False
for par, parT, blkId in block_list:
if lastBlkId != None:
blkId = lastBlkId
if any(["bn." in a[1] for a in block_list]):
for i, val in enumerate(block_list):
par, parT, blkId, dims, _ = val
if parT == 'weight' and dims == 1:
block_list[i][1] = "bn.gamma"
if parT == 'bias':
block_list[i][1] = "bn.beta"
bn_block_list.append( block_list )
else:
weight_block_list.append(block_list)

weight_shape = None
weight_blkId = None
for weight_block in weight_block_list:
weight_shape = None
weight_blkId = None
for par, parT, blkId, dims, paramSh in weight_block:
block_id_and_param_type["parameter_type"][par] = parT
block_id_and_param_type["block_identifier"][par] = blkId
if parT == 'weight':
hasWeightParam = True
if (len(block_list) == blk_len and parT == 'bias') or ( len(block_list) == 6 and parT == 'bias' and 'beta' in par ): ## In this case bias in bn.beta
block_id_and_param_type["parameter_type"][par] = "bn.beta"
block_list[1]="bn.beta"
else:
weight_shape = paramSh
weight_blkId = blkId

if len(bn_block_list) != 0 and any([dim == bn_block_list[0][0][4][0] for dim in weight_shape]):
bn_block = bn_block_list.pop(0)
for par, parT, _, _, _ in bn_block:
block_id_and_param_type["parameter_type"][par] = parT
block_id_and_param_type["block_identifier"][par] = blkId

if hasWeightParam == False: #incomplete block detected
lastBlkId = blkId
else:
lastBlkId = None
block_id_and_param_type["block_identifier"][par] = weight_blkId

assert len(bn_block_list) == 0

except:
print("INFO: Guessing of block_id_and_parameter_type failed! block_id_and_parameter_type has been set to 'None'!")
block_id_and_param_type = None
Expand Down
122 changes: 92 additions & 30 deletions framework/tensorflow_model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,11 +299,22 @@ def init_model_from_dict(self, tf_dict):


model_data = {'parameters': {}, 'reduction_method': 'baseline'}
model_info = {'parameter_type': {}, 'parameter_dimensions': {}, 'parameter_index': {}, 'block_identifier': {}, 'topology_storage_format' : None, 'topology_compression_format' : None}
model_info = {'parameter_type': {}, 'parameter_dimensions': {}, 'parameter_index': {}, 'block_identifier': {}, 'original_size': {}, 'topology_storage_format' : None, 'topology_compression_format' : None}

type_list_int = ['int8', 'int16', 'int32', 'uint8', 'uint16', 'uint32']
type_list_1_bytes = ['int8', 'uint8']
type_list_2_bytes = ['int16', 'uint16', 'float16']
original_size = 0

for i, module_name in enumerate(tf_dict):
model_data['parameters'][module_name] = tf_dict[module_name][()]
if model_data['parameters'][module_name].dtype in type_list_1_bytes:
original_size += model_data['parameters'][module_name].size
elif model_data['parameters'][module_name].dtype in type_list_2_bytes:
original_size += model_data['parameters'][module_name].size*2
else:
original_size += model_data['parameters'][module_name].size*4
model_data['parameters'][module_name] = np.int32(model_data['parameters'][module_name]) if model_data['parameters'][module_name].dtype in type_list_int else model_data['parameters'][module_name]
mdl_shape = model_data['parameters'][module_name].shape
model_info['parameter_dimensions'][module_name] = mdl_shape
if len(mdl_shape) == 0: #scalar
Expand Down Expand Up @@ -331,6 +342,8 @@ def init_model_from_dict(self, tf_dict):
model_info['parameter_type'][module_name] = 'bn.var' if quantize_onedim else 'unspecified'
elif 'gamma' in module_name:
model_info['parameter_type'][module_name] = 'bn.gamma' if quantize_onedim else 'unspecified'
elif 'weight' in module_name:
model_info['parameter_type'][module_name] = 'weight' if quantize_onedim else 'unspecified'
else:
model_info['parameter_type'][module_name] = 'unspecified'
else:
Expand All @@ -339,6 +352,8 @@ def init_model_from_dict(self, tf_dict):
model_info['topology_storage_format'] = nnc_core.nnr_model.TopologyStorageFormat.NNR_TPL_TEF
model_info['topology_compression_format'] = nnc_core.nnr_model.TopologyCompressionFormat.NNR_PT_RAW

model_info["original_size"] = original_size

self.__model_info = model_info

return model_data["parameters"]
Expand Down Expand Up @@ -382,58 +397,105 @@ def guess_block_id_and_param_type(self, model_parameters):
try:
block_id_and_param_type = {"block_identifier" : {}, "parameter_type" : {}}
block_dict = dict()
blkNum = -1
for param in model_parameters.keys():
splitted_param = param.split("/")
for iElem, blkElem in enumerate(splitted_param):
if blkElem.endswith("_bn"):
splitted_param[iElem] = blkElem[0:-3]
elif blkElem.endswith("_BN"):
splitted_param[iElem] = blkElem[0:-3]
elif blkElem.startswith("bn_"):
splitted_param[iElem] = blkElem[3::]
elif blkElem.startswith("BN_"):
splitted_param[iElem] = blkElem[3::]
param_end = splitted_param[-1]
block_id = "/".join(splitted_param[0:-1])
base_block_id = "/".join(splitted_param[0:-1])
base_block_id = "/".join(splitted_param[0:-1])+":" if len(splitted_param[0:-1]) != 0 else "genericBlk:"
dims = len(model_parameters[param].shape)

if block_id not in block_dict.keys():
block_dict[block_id] = []
paramShape = model_parameters[param].shape


if dims > 1 and ('kernel' in param_end or 'weight' in param_end):
paramType = 'weight'
blockId = block_id
blockId = base_block_id
elif dims > 1:
paramType = 'weight'
blockId = base_block_id
elif dims == 1:
if 'bias' in param or 'beta' in param: ##could also be bn.beta
if 'bias' in param_end or 'beta' in param_end: ##could also be bn.beta
paramType = 'bias'
blockId = block_id
elif '.running_mean' in param or 'moving_mean' in param:
blockId = base_block_id
elif 'running_mean' in param_end or 'moving_mean' in param_end:
paramType = 'bn.mean'
blockId = block_id
elif '.running_var' in param or 'moving_variance' in param:
blockId = base_block_id
elif 'running_var' in param_end or 'moving_variance' in param_end:
paramType = 'bn.var'
blockId = block_id
elif '.weight' in param or 'gamma' in param:
blockId = base_block_id
elif 'weight_scaling' in param_end:
paramType = 'weight.ls'
blockId = base_block_id
elif 'gamma' in param_end:
paramType = 'bn.gamma'
blockId = block_id
blockId = base_block_id
elif 'weight' in param_end:
paramType = 'weight'
blockId = base_block_id
else:
paramType = 'unspecified'
blockId = None

else:
paramType = 'unspecified'
blockId = None


if blockId:
block_dict[block_id].append( [param, paramType, blockId] )
block_id = base_block_id + str(blkNum)
if block_id in block_dict.keys():
if any([a[1] == paramType for a in block_dict[block_id]]):
blkNum += 1
block_id = base_block_id + str(blkNum)
blockId = base_block_id + str(blkNum)
else:
blkNum += 1
block_id = base_block_id + str(blkNum)
blockId = base_block_id + str(blkNum)

if block_id not in block_dict.keys():
block_dict[block_id] = []

block_dict[block_id].append( [param, paramType, blockId, dims, paramShape] )
else:
block_id_and_param_type["parameter_type"][param] = paramType
block_id_and_param_type["block_identifier"][param] = blockId


weight_block_list = []
bn_block_list = []

for block_list in block_dict.values():
for par, parT, blkId in block_list:
if (len(block_list) == 5 and parT == 'bias') or ( len(block_list) == 6 and parT == 'bias' and 'beta' in par ): ## In this case bias in bn.beta
block_id_and_param_type["parameter_type"][par] = "bn.beta"
else:
block_id_and_param_type["parameter_type"][par] = parT
if any(["bn." in a[1] for a in block_list]):
for i, val in enumerate(block_list):
par, parT, blkId, dims, _ = val
if parT == 'weight' and dims == 1:
block_list[i][1] = "bn.gamma"
if parT == 'bias':
block_list[i][1] = "bn.beta"
bn_block_list.append( block_list )
else:
weight_block_list.append(block_list)

weight_shape = None
weight_blkId = None
for weight_block in weight_block_list:
weight_shape = None
weight_blkId = None
for par, parT, blkId, dims, paramSh in weight_block:
block_id_and_param_type["parameter_type"][par] = parT
block_id_and_param_type["block_identifier"][par] = blkId
if parT == 'weight':
weight_shape = paramSh
weight_blkId = blkId

if len(bn_block_list) != 0 and any([dim == bn_block_list[0][0][4][0] for dim in weight_shape]):
bn_block = bn_block_list.pop(0)
for par, parT, _, _, _ in bn_block:
block_id_and_param_type["parameter_type"][par] = parT
block_id_and_param_type["block_identifier"][par] = weight_blkId

assert len(bn_block_list) == 0, "Unhandled BN parameters!"

except:
print("INFO: Guessing of block_id_and_parameter_type failed! block_id_and_parameter_type has been set to 'None'!")
block_id_and_param_type = None
Expand Down
Loading

0 comments on commit 5b91f18

Please sign in to comment.