ValueError: Unsupported ONNX opset version: 13

Goal: successfully run the notebook as-is on Jupyter Labs.

Section 2.1 throws a ValueError, which I believe is caused by the PyTorch version I am using.

A very similar question has been asked before; the solution there was to use the latest PyTorch version... which I am already using.
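
For reference, the installed versions can be printed directly from a notebook cell (a minimal sketch; both packages are assumed to be importable):

import torch
import onnxruntime

print(torch.__version__)        # PyTorch build used for the ONNX export
print(onnxruntime.__version__)  # ONNX Runtime build used for inference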


Code:

import logging
import onnxruntime
import torch

logger = logging.getLogger(__name__)  # logging is assumed to be configured elsewhere in the notebook

def export_onnx_model(args, model, tokenizer, onnx_model_path):
    with torch.no_grad():
        inputs = {'input_ids':      torch.ones(1, 128, dtype=torch.int64),
                  'attention_mask': torch.ones(1, 128, dtype=torch.int64),
                  'token_type_ids': torch.ones(1, 128, dtype=torch.int64)}
        outputs = model(**inputs)

        symbolic_names = {0: 'batch_size', 1: 'max_seq_len'}
        torch.onnx.export(model,                                            # model being run
                    (inputs['input_ids'],                             # model input (or a tuple for multiple inputs)
                    inputs['attention_mask'], 
                    inputs['token_type_ids']),
                    onnx_model_path,                                # where to save the model (can be a file or file-like object)
                    opset_version=13,                                 # the ONNX version to export the model to
                    do_constant_folding=True,     
                    input_names=['input_ids',                         # the model's input names
                                'input_mask', 
                                'segment_ids'],
                    output_names=['output'],                    # the model's output names
                    dynamic_axes={'input_ids': symbolic_names,        # variable length axes
                                'input_mask' : symbolic_names,
                                'segment_ids' : symbolic_names})
        logger.info("ONNX Model exported to {0}".format(onnx_model_path))

export_onnx_model(configs, model, tokenizer, "bert.onnx")

Traceback:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-7-7aaa4c5455a0> in <module>
     25         logger.info("ONNX Model exported to {0}".format(onnx_model_path))
     26 
---> 27 export_onnx_model(configs, model, tokenizer, "bert.onnx")

<ipython-input-7-7aaa4c5455a0> in export_onnx_model(args, model, tokenizer, onnx_model_path)
     22                     dynamic_axes={'input_ids': symbolic_names,        # variable length axes
     23                                 'input_mask' : symbolic_names,
---> 24                                 'segment_ids' : symbolic_names})
     25         logger.info("ONNX Model exported to {0}".format(onnx_model_path))
     26 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/__init__.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
    228                         do_constant_folding, example_outputs,
    229                         strip_doc_string, dynamic_axes, keep_initializers_as_inputs,
--> 230                         custom_opsets, enable_onnx_checker, use_external_data_format)
    231 
    232 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/utils.py in export(model, args, f, export_params, verbose, training, input_names, output_names, aten, export_raw_ir, operator_export_type, opset_version, _retain_param_name, do_constant_folding, example_outputs, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, custom_opsets, enable_onnx_checker, use_external_data_format)
     89             dynamic_axes=dynamic_axes, keep_initializers_as_inputs=keep_initializers_as_inputs,
     90             custom_opsets=custom_opsets, enable_onnx_checker=enable_onnx_checker,
---> 91             use_external_data_format=use_external_data_format)
     92 
     93 

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/utils.py in _export(model, args, f, export_params, verbose, training, input_names, output_names, operator_export_type, export_type, example_outputs, opset_version, _retain_param_name, do_constant_folding, strip_doc_string, dynamic_axes, keep_initializers_as_inputs, fixed_batch_size, custom_opsets, add_node_names, enable_onnx_checker, use_external_data_format, onnx_shape_inference, use_new_jit_passes)
    614         # training=TrainingMode.TRAINING or training=TrainingMode.PRESERVE,
    615         # (to preserve whatever the original training mode was.)
--> 616         _set_opset_version(opset_version)
    617         _set_operator_export_type(operator_export_type)
    618         with select_model_mode_for_export(model, training):

~/anaconda3/envs/pytorch_latest_p36/lib/python3.6/site-packages/torch/onnx/symbolic_helper.py in _set_opset_version(opset_version)
    506         _export_onnx_opset_version = opset_version
    507         return
--> 508     raise ValueError("Unsupported ONNX opset version: " + str(opset_version))
    509 
    510 _operator_export_type = None

ValueError: Unsupported ONNX opset version: 13

Please let me know if there is anything else I should add to this post.

ValueError: Unsupported ONNX opset version N -> install the latest PyTorch.

Thanks to Tianleiwu on this Git issue.
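
Each PyTorch release only supports ONNX export up to a certain opset, and opset 13 export was added in PyTorch 1.8.0, so an older install raises the error above. A quick way to confirm whether the installed build handles opset 13 is to export a trivial module (a minimal sketch; the file name opset13_check.onnx is arbitrary):

import torch

# Exporting any small module at opset 13 reproduces the
# "Unsupported ONNX opset version: 13" error on PyTorch < 1.8.0.
tiny = torch.nn.Linear(4, 2)
torch.onnx.export(tiny, torch.randn(1, 4), "opset13_check.onnx", opset_version=13)
print("opset 13 export OK")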

Per the first cell of the notebook:

# Install or upgrade PyTorch 1.8.0 and OnnxRuntime 1.7.0 for CPU-only.

I inserted a new cell immediately after it:

pip install torch==1.10.0  # latest release at the time of writing
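
After the install finishes, the kernel has to be restarted so the upgraded PyTorch is actually imported; the export from section 2.1 then runs without the error. A minimal check (configs, model and tokenizer come from the earlier notebook cells):

import torch

assert torch.__version__.startswith("1.10"), torch.__version__  # upgraded build is active
export_onnx_model(configs, model, tokenizer, "bert.onnx")        # opset 13 export now succeeds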