Tags: python, pytorch, onnx, quantization, onnxruntime

ValueError: Unsupported ONNX opset version: 13


Goal: successfully run the Notebook as-is in JupyterLab.

Section 2.1 throws a ValueError, which I believe is caused by the PyTorch version I'm using:

  • PyTorch 1.7.1
  • Kernel conda_pytorch_latest_p36

There is a very similar SO post; the solution there was to use the latest PyTorch version... which I am already using.
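
To double-check which version the notebook kernel itself is importing (a quick sketch; nothing here is specific to the Notebook), you can print torch.__version__ in a cell:

import torch

# Despite the kernel being named conda_pytorch_latest_p36, this prints 1.7.1,
# which is not the latest release.
print(torch.__version__)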


Code:

import onnxruntime

def export_onnx_model(args, model, tokenizer, onnx_model_path):
    with torch.no_grad():
        inputs = {'input_ids':      torch.ones(1, 128, dtype=torch.int64),
                  'attention_mask': torch.ones(1, 128, dtype=torch.int64),
                  'token_type_ids': torch.ones(1, 128, dtype=torch.int64)}
        outputs = model(**inputs)

        symbolic_names = {0: 'batch_size', 1: 'max_seq_len'}
        torch.onnx.export(model,                           # model being run
                          (inputs['input_ids'],            # model input (or a tuple for multiple inputs)
                           inputs['attention_mask'],
                           inputs['token_type_ids']),
                          onnx_model_path,                 # where to save the model (can be a file or file-like object)
                          opset_version=13,                # the ONNX opset version to export the model to
                          do_constant_folding=True,
                          input_names=['input_ids',        # the model's input names
                                       'input_mask',
                                       'segment_ids'],
                          output_names=['output'],         # the model's output names
                          dynamic_axes={'input_ids': symbolic_names,      # variable-length axes
                                        'input_mask': symbolic_names,
                                        'segment_ids': symbolic_names})
        logger.info("ONNX Model exported to {0}".format(onnx_model_path))

export_onnx_model(configs, model, tokenizer, "bert.onnx")

Traceback:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-7-7aaa4c5455a0> in <module>
     25         logger.info("ONNX Model exported to {0}".format(onnx_model_path))
     26 
---> 27 export_onnx_model(configs, model, tokenizer, "bert.onnx")

<ipython-input-7-7aaa4c5455a0> in export_onnx_model(args, model, tokenizer, onnx_model_path)
     22                     dynamic_axes={'input_ids': symbolic_names,        # variable length axes
     23                                 'input_mask' : symbolic_names,
---> 24                                 'segment_ids' : symbolic_names})
     25         logger.info("ONNX Model exported to {0}".format(onnx_model_path))
     26 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/__init__.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
    228                         do_constant_folding, example_outputs,
    229                         strip_doc_string, dynamic_axes, keep_initializers_as_inputs,
--> 230                         custom_opsets, enable_onnx_checker, use_external_data_format)
    231 
    232 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/utils.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
     89             dynamic_axes=dynamic_axes, keep_initializers_as_inputs=keep_initializers_as_inputs,
     90             custom_opsets=custom_opsets, enable_onnx_checker=enable_onnx_checker,
---> 91             use_external_data_format=use_external_data_format)
     92 
     93 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/utils.py in _export(model, args, f, export_params, verbose, training, input_names, output_names, operator_export_type, export_type, example_outputs, opset_version, _retain_param_name, do_constant_folding, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, fixed_batch_size, custom_opsets, add_node_names, enable_onnx_checker, use_external_data_format, onnx_shape_inference, use_new_jit_passes)
    614         # training=TrainingMode.TRAINING or training=TrainingMode.PRESERVE,
    615         # (to preserve whatever the original training mode was.)
--> 616         _set_opset_version(opset_version)
    617         _set_operator_export_type(operator_export_type)
    618         with select_model_mode_for_export(model, training):

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/symbolic_helper.py in _set_opset_version(opset_version)
    506         _export_onnx_opset_version = opset_version
    507         return
--> 508     raise ValueError("Unsupported ONNX opset version: " + str(opset_version))
    509 
    510 _operator_export_type = None

ValueError: Unsupported ONNX opset version: 13

Please let me know if there's anything else I can add to the post.


Solution

  • ValueError: Unsupported ONNX opset version N -> install the latest PyTorch; opset 13 export requires PyTorch 1.8.0 or newer.

    Credit to Tianleiwu on this GitHub issue.

    As per the 1st cell of the Notebook:

    # Install or upgrade PyTorch 1.8.0 and OnnxRuntime 1.7.0 for CPU-only.
    

    I inserted a new cell right after:

    !pip install torch==1.10.0  # latest release at the time of writing
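
    After restarting the kernel so the new version is picked up, the export cell in Section 2.1 should run without the ValueError. As a quick sanity check (a sketch, assuming the model was written to "bert.onnx" as in the cell above), you can confirm the upgrade took effect and that ONNX Runtime loads the exported graph:

    import torch
    import onnxruntime as ort

    # PyTorch 1.7.1 rejected opset 13; the upgraded version accepts it.
    print(torch.__version__)

    # The exported model loads, and the input names match those passed to torch.onnx.export.
    session = ort.InferenceSession("bert.onnx")
    print([i.name for i in session.get_inputs()])  # ['input_ids', 'input_mask', 'segment_ids']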