You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Starting: yolov9s_fp.pt
Opening YOLOv9 model
YOLOv9s summary (fused): 486 layers, 7,167,862 parameters, 0 gradients, 26.7 GFLOPs
Creating labels.txt file
Exporting the model to ONNX
Traceback (most recent call last):
File "C:\Users\koti0\OneDrive\Desktop\Deepstream\yolov9\export_yoloV9.py", line 145, in <module>
main(args)
File "C:\Users\koti0\OneDrive\Desktop\Deepstream\yolov9\export_yoloV9.py", line 111, in main
torch.onnx.export(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 551, in export
_export(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1648, in _export
graph, params_dict, torch_out = _model_to_graph(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1170, in _model_to_graph
graph, params, torch_out, module = _create_jit_graph(model, args)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1046, in _create_jit_graph
graph, torch_out = _trace_and_get_graph_from_model(model, args)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 950, in _trace_and_get_graph_from_model
trace_graph, torch_out, inputs_states = torch.jit._get_trace_graph(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 1497, in _get_trace_graph
outs = ONNXTracedModule(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 141, in forward
graph, out = torch._C._create_graph_by_tracing(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 132, in wrapper
outs.append(self.inner(*trace_inputs))
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\container.py", line 219, in forward
input = module(input)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 112, in forward
return self.predict(x, *args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 130, in predict
return self._predict_once(x, profile, visualize, embed)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 151, in _predict_once
x = m(x) # run
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\modules\head.py", line 72, in forward
y = self._inference(x)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\modules\head.py", line 108, in _inference
if self.export and self.format in {"saved_model", "pb", "tflite", "edgetpu", "tfjs"}: # avoid TF FlexSplitV ops
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1729, in __getattr__
raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")
AttributeError: 'Detect' object has no attribute 'format'
The text was updated successfully, but these errors were encountered:
koti-malla
changed the title
problem with conversion of yolov9s with export_yoloV9.py
Error during YOLOv9 ONNX Export [custom model ] with export_yoloV9.py : AttributeError: 'Detect' object has no attribute 'format'
Nov 9, 2024
Starting: yolov9s_fp.pt
Opening YOLOv9 model
YOLOv9s summary (fused): 486 layers, 7,167,862 parameters, 0 gradients, 26.7 GFLOPs
Creating labels.txt file
Exporting the model to ONNX
Traceback (most recent call last):
File "C:\Users\koti0\OneDrive\Desktop\Deepstream\yolov9\export_yoloV9.py", line 145, in <module>
main(args)
File "C:\Users\koti0\OneDrive\Desktop\Deepstream\yolov9\export_yoloV9.py", line 111, in main
torch.onnx.export(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 551, in export
_export(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1648, in _export
graph, params_dict, torch_out = _model_to_graph(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1170, in _model_to_graph
graph, params, torch_out, module = _create_jit_graph(model, args)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 1046, in _create_jit_graph
graph, torch_out = _trace_and_get_graph_from_model(model, args)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\onnx\utils.py", line 950, in _trace_and_get_graph_from_model
trace_graph, torch_out, inputs_states = torch.jit._get_trace_graph(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 1497, in _get_trace_graph
outs = ONNXTracedModule(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 141, in forward
graph, out = torch._C._create_graph_by_tracing(
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\jit\_trace.py", line 132, in wrapper
outs.append(self.inner(*trace_inputs))
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\container.py", line 219, in forward
input = module(input)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 112, in forward
return self.predict(x, *args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 130, in predict
return self._predict_once(x, profile, visualize, embed)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\tasks.py", line 151, in _predict_once
x = m(x) # run
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1543, in _slow_forward
result = self.forward(*input, **kwargs)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\modules\head.py", line 72, in forward
y = self._inference(x)
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\ultralytics\nn\modules\head.py", line 108, in _inference
if self.export and self.format in {"saved_model", "pb", "tflite", "edgetpu", "tfjs"}: # avoid TF FlexSplitV ops
File "C:\Users\koti0\AppData\Local\Programs\Python\Python39\lib\site-packages\torch\nn\modules\module.py", line 1729, in __getattr__
raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")
AttributeError: 'Detect' object has no attribute 'format'
The text was updated successfully, but these errors were encountered: