54 changes: 54 additions & 0 deletions modelconverter/packages/hailo/exporter.py
@@ -23,6 +23,7 @@ class HailoExporter(Exporter):

def __init__(self, config: SingleStageConfig, output_dir: Path):
super().__init__(config=config, output_dir=output_dir)
self.force_onnx_names = config.hailo.force_onnx_names
self.optimization_level = config.hailo.optimization_level
self.compression_level = config.hailo.compression_level
self.batch_size = config.hailo.batch_size
@@ -69,6 +70,10 @@ def export(self) -> Path:
logger.info("Model translated to Hailo IR.")
har_path = self.input_model.with_suffix(".har")
runner.save_har(har_path)

if self.force_onnx_names:
har_path = self._force_onnx_names(har_path)

if self._disable_calibration:
self._inference_model_path = self.output_dir / Path(
self.original_model_name
@@ -96,6 +101,55 @@ def export(self) -> Path:
hef_file.write(hef)
return hef_path

def _force_onnx_names(self, har_path: Path) -> Path:
"""Force ONNX layer names into a .har model."""

runner = ClientRunner(hw_arch=self.hw_arch, har=str(har_path))
hn = runner.get_hn()
npz = dict(runner.get_params())

hn_layers = hn["layers"]

map_list = []
for name, layer in hn_layers.items():
if layer["type"] == "output_layer":
input_name = layer["input"][0]
context = input_name.split("/")[0]
orig_name = layer["original_names"][0]
new_name = f"{context}/{orig_name}"
map_list.append((input_name, new_name))

name_map = dict(map_list)

new_layers = {}
for name, layer in hn_layers.items():
new_name = name_map.get(name, name)
if "input" in layer:
layer["input"] = [name_map.get(i, i) for i in layer["input"]]
if "output" in layer:
layer["output"] = [name_map.get(o, o) for o in layer["output"]]
new_layers[new_name] = layer

hn["layers"] = new_layers

updated_npz = {}
for key, val in npz.items():
for old, new in name_map.items():
if old in key:
key = key.replace(old, new)
break
updated_npz[key] = val
npz = updated_npz

outputs = hn["net_params"]["output_layers_order"]
hn["net_params"]["output_layers_order"] = [name_map.get(o, o) for o in outputs]

runner.set_hn(hn)
runner.load_params(npz)
runner.save_har(har_path)

return har_path

def _get_calibration_data(
self, runner: ClientRunner
) -> dict[str, np.ndarray]:
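To make the effect of `_force_onnx_names` concrete, here is a minimal, self-contained sketch of the same renaming pass applied to a toy `hn` dictionary. The layer names, `original_names` values, and graph structure below are hypothetical and only mirror the fields the method touches; they are not taken from a real Hailo HN graph.

# Toy illustration of the renaming pass in _force_onnx_names.
# The hn structure is a hypothetical example, not a real Hailo HN graph.
hn = {
    "layers": {
        "model/conv42": {
            "type": "conv",
            "input": ["model/input_layer1"],
            "output": ["model/output_layer1"],
        },
        "model/output_layer1": {
            "type": "output_layer",
            "input": ["model/conv42"],
            "original_names": ["output0"],  # the original ONNX output name
        },
    },
    "net_params": {"output_layers_order": ["model/conv42"]},
}

# Build the old-name -> new-name map the same way the PR does: the layer feeding
# each output_layer is renamed to "<context>/<original ONNX name>".
name_map = {}
for layer in hn["layers"].values():
    if layer["type"] == "output_layer":
        input_name = layer["input"][0]
        context = input_name.split("/")[0]
        name_map[input_name] = f"{context}/{layer['original_names'][0]}"

# Apply the map to layer keys, to input/output references, and to the declared
# output order, mirroring the loops in the PR.
new_layers = {}
for name, layer in hn["layers"].items():
    if "input" in layer:
        layer["input"] = [name_map.get(i, i) for i in layer["input"]]
    if "output" in layer:
        layer["output"] = [name_map.get(o, o) for o in layer["output"]]
    new_layers[name_map.get(name, name)] = layer
hn["layers"] = new_layers
hn["net_params"]["output_layers_order"] = [
    name_map.get(o, o) for o in hn["net_params"]["output_layers_order"]
]

print(name_map)                                 # {'model/conv42': 'model/output0'}
print(list(hn["layers"]))                       # ['model/output0', 'model/output_layer1']
print(hn["net_params"]["output_layers_order"])  # ['model/output0']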
1 change: 1 addition & 0 deletions modelconverter/utils/config.py
@@ -229,6 +229,7 @@ class TargetConfig(CustomBaseModel):


class HailoConfig(TargetConfig):
force_onnx_names: bool = True
optimization_level: Literal[-100, 0, 1, 2, 3, 4] = 2
compression_level: Literal[0, 1, 2, 3, 4, 5] = 2
batch_size: int = 8
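For reference, a minimal sketch of how the new field surfaces at the config level, assuming `HailoConfig` validates like a standard Pydantic model and that its remaining fields, inherited from `TargetConfig`, all have defaults:

from modelconverter.utils.config import HailoConfig

# The flag defaults to True, matching defaults.yaml and the updated tests.
cfg = HailoConfig()
assert cfg.force_onnx_names is True
assert cfg.optimization_level == 2

# Opting out keeps whatever layer names the Hailo translation assigns.
cfg = HailoConfig(force_onnx_names=False)
assert cfg.force_onnx_names is False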
3 changes: 3 additions & 0 deletions shared_with_container/configs/defaults.yaml
@@ -123,6 +123,9 @@ stages:

# --- Hailo-Specific Arguments ---
hailo:
# Force ONNX layer names on the final .har and .hef model
force_onnx_names: true

# Specifies the optimization level.
# Either a number between 0 and 4 or -100 to disable all optimizations.
optimization_level: 2
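A user config that turns the feature off parses back to a plain boolean. A minimal sketch using PyYAML; only the hailo block mirrors defaults.yaml, and the override snippet itself is hypothetical:

import yaml  # PyYAML, assumed to be available in the environment

# Hypothetical user override of the new default.
override = """
hailo:
  force_onnx_names: false
  optimization_level: 2
"""

parsed = yaml.safe_load(override)
print(parsed["hailo"]["force_onnx_names"])  # False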
2 changes: 2 additions & 0 deletions tests/test_utils/test_config.py
@@ -69,6 +69,7 @@
"optimization_level": 2,
},
"hailo": {
"force_onnx_names": True,
"optimization_level": 2,
"compression_level": 2,
"batch_size": 8,
@@ -369,6 +370,7 @@ def test_correct():
},
"rvc4": {**DEFAULT_TARGET_CONFIGS["rvc4"]},
"hailo": {
"force_onnx_names": True,
"disable_calibration": False,
"optimization_level": 3,
"compression_level": 3,