ONNX Backends for onnxruntime1

Backend class: OnnxInferenceBackendOrt.
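
The backend follows the standard ONNX backend API, so a single model can also
be run directly with it. A minimal sketch, assuming the conventional
prepare/run interface; the model path, input shape and values are placeholders,
not part of the test suite below:

    import numpy
    from onnx import load
    import mlprodict.onnxrt.backend_ort as backend

    # "model.onnx" is a placeholder path to any ONNX model.
    model = load("model.onnx")
    if backend.supports_device("CPU"):
        rep = backend.prepare(model, "CPU")
        # Inputs are numpy arrays (a list of arrays for multi-input models).
        x = numpy.random.rand(3, 4).astype(numpy.float32)
        outputs = rep.run(x)
        print(outputs)

The script below runs the full ONNX backend test suite against the same
backend and prints a filtered report.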

<<<

import unittest
import sys
from datetime import datetime
from contextlib import redirect_stdout, redirect_stderr
from io import StringIO
from onnx.backend.test import BackendTest
from onnx import __version__ as onnx_version
from onnxruntime import __version__ as ort_version
from numpy import __version__ as npy_version
import mlprodict.onnxrt.backend_ort as backend

back_test = BackendTest(backend, __name__)
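# Keep only the CPU variants of the tests and skip the large pretrained
# models (DenseNet, Inception, ResNet50, VGG19, ...), which are slow to
# download and run.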
back_test.include('.*_cpu')
back_test.exclude('.*_blvc_.*')
back_test.exclude('.*_densenet_.*')
back_test.exclude('.*_densenet121_.*')
back_test.exclude('.*_inception_.*')
back_test.exclude('.*_resnet50_.*')
back_test.exclude('.*_shufflenet_.*')
back_test.exclude('.*_squeezenet_.*')
back_test.exclude('.*_vgg19_.*')
back_test.exclude('.*_zfnet512_.*')
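# Inject the generated test case classes into the module namespace so that
# unittest.main can discover and run them.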
globals().update(back_test.enable_report().test_cases)

print('---------------------------------')
print('python', sys.version)
print('onnx', onnx_version)
print('onnxruntime', ort_version)
print('numpy', npy_version)
print('---------------------------------')
print(datetime.now(), "BEGIN")
print('---------------------------------')

buffer = StringIO()
# Capture the verbose unittest output so it can be filtered below;
# drop the redirections to see the raw log as it is produced.
with redirect_stdout(buffer), redirect_stderr(buffer):
    res = unittest.main(verbosity=2, exit=False)

testsRun = res.result.testsRun
errors = len(res.result.errors)
skipped = len(res.result.skipped)
unexpectedSuccesses = len(res.result.unexpectedSuccesses)
expectedFailures = len(res.result.expectedFailures)

print('---------------------------------')
print(datetime.now(), "END")
print('---------------------------------')

print("testsRun=%d errors=%d skipped=%d" % (testsRun, errors, skipped))
print("unexpectedSuccesses=%d expectedFailures=%d" % (
    unexpectedSuccesses, expectedFailures))
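# Success ratio over the tests that actually ran (skipped tests excluded);
# e.g. with 2492 run, 1254 skipped and 352 errors:
# 1 - 352 / (2492 - 1254) ~ 0.7157.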
ran = testsRun - skipped
print("ratio=%f" % (1 - errors * 1.0 / ran))
print('---------------------------------')
lines = buffer.getvalue().split('\n')
print("\n".join(line for line in lines
      if "skipped 'no matched include pattern'" not in line))

>>>

    ---------------------------------
    python 3.9.1 (default, Jan 18 2021, 16:35:58) 
    [GCC 8.3.0]
    onnx 1.13.0
    onnxruntime 1.13.1
    numpy 1.23.5
    ---------------------------------
    2023-02-04 07:11:06.208338 BEGIN
    ---------------------------------
    ---------------------------------
    2023-02-04 07:11:46.489833 END
    ---------------------------------
    testsRun=2492 errors=352 skipped=1254
    unexpectedSuccesses=0 expectedFailures=0
    ratio=0.715670
    ---------------------------------
    test_abs_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/npy/xop.py:17: DeprecationWarning: Please use `coo_matrix` from the `scipy.sparse` namespace, the `scipy.sparse.coo` namespace is deprecated.
      from scipy.sparse.coo import coo_matrix
    somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py:207: DeprecationWarning: `np.object` is a deprecated alias for the builtin `object`. To silence this warning, use `object` by itself. Doing this will not modify any behavior and is safe. 
    Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations
      if ref_outputs[i].dtype == np.object:
    ok
    test_acos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_adagrad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adam_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_add_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_add_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_add_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_and2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmin_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_precomputed_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_basic_conv_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_basic_conv_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_batchnorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_batchnorm_epsilon_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_batchnorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_batchnorm_example_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_left_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitshift_left_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitshift_left_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_right_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitshift_right_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitshift_right_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_bitwise_and_i16_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_i16_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_i16_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_blackmanwindow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_blackmanwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_blackmanwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_blackmanwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_cast_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_DOUBLE_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_DOUBLE_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_castlike_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_STRING_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_ceil_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_celu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_celu_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_center_crop_pad_crop_and_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_and_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_chw_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_chw_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_hwc_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_hwc_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_max_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_default_int8_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_min_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_default_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_max_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_min_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_outbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_outbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_splitbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_splitbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_col2im_5d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_compress_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_compress_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_compress_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_compress_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_1d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_1d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_concat_3d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_constant_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constant_pad_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constantofshape_float_ones_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_constantofshape_int_shape_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_constantofshape_int_zeros_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_conv_with_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_conv_with_strides_and_asymmetric_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_conv_with_strides_no_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_conv_with_strides_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convinteger_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convinteger_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_kernel_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_convtranspose_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_1d_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_1d_reverse_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_1d_reverse_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_2d_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_depthtospace_crd_mode_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_depthtospace_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dequantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dequantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_det_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_det_nd_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dft_axis_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_dft_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_dft_inverse_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_div_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_default_mask_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_default_old_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_default_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_random_old_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_max_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_max_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_min_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dynamicquantizelinear_min_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_batch_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_einsum_batch_matmul_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_einsum_inner_prod_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_einsum_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_einsum_transpose_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_elu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_elu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_erf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_exp_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_exp_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_expand_dim_changed_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_expand_dim_unchanged_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_eyelike_populate_off_main_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_eyelike_with_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_eyelike_without_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_negative_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_negative_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_negative_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_flatten_negative_axis4_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_floor_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_floor_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_2d_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_elements_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_elements_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_elements_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gathernd_example_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gathernd_example_int32_batch_dim1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gathernd_example_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_all_attributes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_alpha_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_beta_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_matrix_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_no_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_scalar_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_single_elem_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_default_zero_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_transposeA_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gemm_transposeB_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_globalaveragepool_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_globalaveragepool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_globalmaxpool_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_globalmaxpool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_aligncorners_true_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_bicubic_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_bilinear_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_border_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_reflection_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_zeros_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_group_normalization_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gru_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gru_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hammingwindow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hammingwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hammingwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hammingwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hannwindow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hannwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hannwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hannwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_one_hot_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardsigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardsigmoid_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardswish_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardswish_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_identity_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_if_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_if_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_if_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_instancenorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_instancenorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isinf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isinf_negative_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isinf_positive_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isnan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis0_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis0_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis0_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis1_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis2_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_1_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_2_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_3_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_3_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis3_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_3_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_4_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_4_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_4d_axis_negative_4_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_leakyrelu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_default_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_log_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_log_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_example_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_example_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_example_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_loop11_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_loop13_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lrn_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_lrn_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lstm_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_lstm_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_lstm_with_peepholes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_matmul_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_matmul_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_matmul_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_matmulinteger_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_max_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_max_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_max_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_2d_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_with_argmax_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_with_argmax_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_maxunpool_export_without_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mean_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mean_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mean_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_melweightmatrix_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_min_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_min_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_min_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mish_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mish_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mod_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_int64_fmod_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mul_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mvn_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mvn_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mvn_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_neg_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NC_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NC_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_no_weight_reduction_mean_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_sum_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_center_point_box_format_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_flipped_coordinates_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_identical_boxes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_limit_output_size_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_single_box_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_suppress_by_IOU_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_two_batches_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonmaxsuppression_two_classes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nonzero_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_not_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_not_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_not_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_onehot_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_onehot_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_onehot_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_onehot_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_optional_get_element_optional_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_optional_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_name_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_name_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_or2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_bcast_array_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_bcast_scalar_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_pow_types_float32_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_pow_types_int32_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int64_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int64_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_prelu_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_prelu_broadcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_prelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_prelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_qlinearconv_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_qlinearmatmul_2D_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_qlinearmatmul_3D_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_quantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_quantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_range_float_type_positive_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_range_float_type_positive_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_range_int32_type_negative_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_range_int32_type_negative_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reciprocal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reciprocal_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_asc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_default_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_desc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_negative_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_empty_axes_input_noop_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_relu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_relu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_allowzero_reordered_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_negative_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_one_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_reduced_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_reordered_all_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_reordered_last_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_zero_and_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reshape_zero_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_not_smaller_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reversesequence_batch_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reversesequence_time_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_rnn_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_roialign_aligned_false_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_roialign_aligned_true_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_round_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scan9_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scan_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_reduction_max_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_reduction_min_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scatter_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_max_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_min_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1_mean_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_no_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_weights_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_weights_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_weights_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_none_weights_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_sum_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sce_sum_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_selu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_selu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_2_sequences_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_extract_shapes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_extract_shapes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_2_sequences_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_clip_end_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_clip_start_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_end_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_start_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_start_1_end_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_start_1_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shape_start_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shrink_hard_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shrink_hard_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shrink_soft_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shrink_soft_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sign_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_simple_rnn_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_simple_rnn_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_size_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_size_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_default_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_default_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_end_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_neg_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_slice_start_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softplus_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softplus_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softplus_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softplus_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softsign_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softsign_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softsign_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softsign_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_spacetodepth_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_spacetodepth_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_1d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_2d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_equal_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_equal_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_equal_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_variable_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_variable_parts_2d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_variable_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_zero_size_splits_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_split_zero_size_splits_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sqrt_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sqrt_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_squeeze_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_squeeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_stft_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_stft_with_window_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_strnormalizer_export_monday_casesensintive_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_strnormalizer_export_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_strnormalizer_export_monday_casesensintive_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_strnormalizer_export_monday_empty_output_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_strnormalizer_export_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_strnormalizer_nostopwords_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sum_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sum_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sum_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_batch_onlybigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_batch_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_only_bigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_onlybigrams_levelempty_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_thresholdedrelu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_thresholdedrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_thresholdedrelu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_thresholdedrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_thresholdedrelu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_thresholdedrelu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tile_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tile_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_top_k_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_top_k_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_top_k_smallest_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_training_dropout_zero_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_training_dropout_zero_ratio_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_3_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_4_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_all_permutations_5_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_transpose_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_one_row_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_out_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_one_row_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_out_neg_out_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unique_not_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unique_sorted_with_axis_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unique_sorted_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unique_sorted_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unique_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_three_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_two_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_unsqueeze_unsorted_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_upsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_where_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_where_long_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/plotting/text_plot.py:469: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      return TENSOR_TYPE_TO_NP_TYPE[TensorProto.FLOAT]  # pylint: disable=E1101
    ERROR
    test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ConstantPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_pad1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_pad1size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_pad2_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_pad2size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_depthwise_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_depthwise_padded_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_depthwise_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_depthwise_with_multiplier_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_groups_thnn_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv2d_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_dilated_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Conv3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ConvTranspose2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ConvTranspose2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Embedding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Embedding_sparse_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_LeakyReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_LeakyReLU_with_negval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Linear_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_LogSoftmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool1d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool2d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PixelShuffle_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ReflectionPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ReplicationPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_SELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Sigmoid_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softmin_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softplus_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Tanh_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ZeroPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_log_softmax_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_log_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_softmax_functional_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_operator_add_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_add_size1_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_add_size1_right_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_add_size1_singleton_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_basic_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_chunk_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_clip_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_concat2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_conv_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_convtranspose_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_exp_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_flatten_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_index_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_max_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_maxpool_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_min_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/plotting/text_plot.py:475: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      return TENSOR_TYPE_TO_NP_TYPE[TensorProto.INT64]  # pylint: disable=E1101
    ERROR
    test_operator_pad_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_permute2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_pow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_reduced_mean_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_reduced_mean_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_reduced_sum_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_reduced_sum_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_repeat_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_repeat_dim_overflow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_selu_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_sqrt_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_symbolic_override_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_symbolic_override_nested_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_view_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_bvlc_alexnet_cpu (__main__.OnnxBackendRealModelTest) ... ok
    test_densenet121_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
    test_densenet121_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
    test_inception_v1_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v1_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v2_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v2_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_resnet50_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
    test_resnet50_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
    test_shufflenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
    test_shufflenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
    test_squeezenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
    test_squeezenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
    test_vgg19_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
    test_vgg19_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
    test_zfnet512_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
    test_zfnet512_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
    test_expand_shape_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_expand_shape_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_expand_shape_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_expand_shape_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model5_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model6_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model7_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sequence_model8_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_shrink_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sign_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_single_relu_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_casesensintive_lower_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_casesensintive_upper_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_empty_output_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_nostopwords_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
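
Most of the tracebacks reported below fall into a few recognizable groups: operators from the ai.onnx.preview.training domain (the Adagrad, Adam and gradient models), which a standard onnxruntime build does not register; opset 18 variants of existing tests (the reduce_*, resize_*, split_*_opset18 and *_expanded_ver18 names), presumably because onnxruntime 1.13 stops at opset 17; and the sequence, scatter and STFT tests. The first failure, test_adagrad_cpu, can be reproduced without the test runner. The sketch below is an assumption, not part of the report: it relies on the standard layout of the onnx backend test data to locate the model.

<<<

import os
import onnx.backend.test
from onnxruntime import InferenceSession
from onnxruntime.capi.onnxruntime_pybind11_state import Fail

# Location of the ONNX backend test data (assumed standard layout).
model_path = os.path.join(
    os.path.dirname(onnx.backend.test.__file__),
    "data", "node", "test_adagrad", "model.onnx")

try:
    # Fails because ai.onnx.preview.training:Adagrad is not a registered op.
    InferenceSession(model_path, providers=["CPUExecutionProvider"])
except Fail as e:
    print(e)

>>>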
    
    ======================================================================
    ERROR: test_adagrad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adagrad(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adagrad(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X' type=dtype('float32') shape=[1]
    input: name='G' type=dtype('float32') shape=[1]
    input: name='H' type=dtype('float32') shape=[1]
    Adagrad[ai.onnx.preview.training](R, T, X, G, H, decay_factor=0.10, epsilon=0.00, norm_coefficient=0.00) -> X_new, H_new
    output: name='X_new' type=dtype('float32') shape=[1]
    output: name='H_new' type=dtype('float32') shape=[1].
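
The Adagrad and Adam tests all fail with this same error, raised while the InferenceSession is being built rather than at execution time: the ai.onnx.preview.training domain is simply not registered. One way to spot such models before attempting to create a session is to inspect their declared opset imports; a minimal sketch, where the model path is hypothetical:

<<<

import onnx

model = onnx.load("model.onnx")  # hypothetical path
# opset_import lists the (domain, version) pairs the model relies on.
uses_training_ops = any(
    op.domain == "ai.onnx.preview.training" for op in model.opset_import)
print(uses_training_ops)

>>>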
    
    ======================================================================
    ERROR: test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adagrad(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adagrad(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X1' type=dtype('float32') shape=[1]
    input: name='X2' type=dtype('float32') shape=[2]
    input: name='G1' type=dtype('float32') shape=[1]
    input: name='G2' type=dtype('float32') shape=[2]
    input: name='H1' type=dtype('float32') shape=[1]
    input: name='H2' type=dtype('float32') shape=[2]
    Adagrad[ai.onnx.preview.training](R, T, X1, X2, G1, G2, H1, H2, decay_factor=0.10, epsilon=0.00, norm_coefficient=0.00) -> X1_new, X2_new, H1_new, H2_new
    output: name='X1_new' type=dtype('float32') shape=[1]
    output: name='X2_new' type=dtype('float32') shape=[2]
    output: name='H1_new' type=dtype('float32') shape=[1]
    output: name='H2_new' type=dtype('float32') shape=[2].
    
    ======================================================================
    ERROR: test_adam_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adam(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adam(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X' type=dtype('float32') shape=[2]
    input: name='G' type=dtype('float32') shape=[2]
    input: name='V' type=dtype('float32') shape=[2]
    input: name='H' type=dtype('float32') shape=[2]
    Adam[ai.onnx.preview.training](R, T, X, G, V, H, alpha=0.95, beta=0.10, epsilon=0.00, norm_coefficient=0.00) -> X_new, V_new, H_new
    output: name='X_new' type=dtype('float32') shape=[2]
    output: name='V_new' type=dtype('float32') shape=[2]
    output: name='H_new' type=dtype('float32') shape=[2].
    
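The `Adam` tests fail for the same reason. A rough NumPy sketch of the update rule, following the ONNX documentation and using the failing node's attribute values (`alpha=0.95`, `beta=0.1`) as defaults; again the helper name and the non-zero `epsilon` are assumptions made for the illustration:

<<<

import numpy as np

def adam_step(R, T, X, G, V, H, alpha=0.95, beta=0.1,
              epsilon=1e-6, norm_coefficient=0.0):
    # Fold L2 regularization into the gradient.
    g = norm_coefficient * X + G
    # Exponential moving averages of the gradient and its square.
    V_new = alpha * V + (1 - alpha) * g
    H_new = beta * H + (1 - beta) * g * g
    # Bias-correct the learning rate once at least one step was taken.
    r = R * np.sqrt(1 - beta ** T) / (1 - alpha ** T) if T > 0 else R
    X_new = X - r * V_new / (np.sqrt(H_new) + epsilon)
    return X_new, V_new, H_new

>>>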
    ======================================================================
    ERROR: test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adam(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Adam(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X1' type=dtype('float32') shape=[1]
    input: name='X2' type=dtype('float32') shape=[2]
    input: name='G1' type=dtype('float32') shape=[1]
    input: name='G2' type=dtype('float32') shape=[2]
    input: name='V1' type=dtype('float32') shape=[1]
    input: name='V2' type=dtype('float32') shape=[2]
    input: name='H1' type=dtype('float32') shape=[1]
    input: name='H2' type=dtype('float32') shape=[2]
    Adam[ai.onnx.preview.training](R, T, X1, X2, G1, G2, V1, V2, H1, H2, alpha=0.95, beta=0.85, norm_coefficient=0.00) -> X1_new, X2_new, V1_new, V2_new, H1_new, H2_new
    output: name='X1_new' type=dtype('float32') shape=[1]
    output: name='X2_new' type=dtype('float32') shape=[2]
    output: name='V1_new' type=dtype('float32') shape=[1]
    output: name='V2_new' type=dtype('float32') shape=[2]
    output: name='H1_new' type=dtype('float32') shape=[1]
    output: name='H2_new' type=dtype('float32') shape=[2].
    
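Since these operators cannot work without a training-enabled build of onnxruntime, the corresponding tests could be filtered out with the same exclusion mechanism the driver script already uses for the model-zoo tests, for instance:

<<<

back_test.exclude('.*_adagrad_.*')
back_test.exclude('.*_adam_.*')

>>>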
    ======================================================================
    ERROR: test_add_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(14) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(14) node with name '''
    opset: domain='' version=14
    input: name='x' type=dtype('uint8') shape=[3, 4, 5]
    input: name='y' type=dtype('uint8') shape=[3, 4, 5]
    Add(x, y) -> sum
    output: name='sum' type=dtype('uint8') shape=[3, 4, 5].
    
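This error is of a different kind: the model loads fine (the `Add` schema accepts `uint8` since opset 14), but the CPU execution provider has no `uint8` kernel for it, hence `NOT_IMPLEMENTED` at `initialize_session` rather than `FAIL` at load time. One possible workaround, sketched below with the standard `onnx.helper` API, is to compute in a supported type and cast back; this is a hypothetical rewrite of the failing graph, not something the backend does, and note that the cast changes overflow behaviour compared to native modulo-256 `uint8` addition.

<<<

from onnx import TensorProto, helper

# Hypothetical rewrite of the failing graph: cast up, add, cast back.
nodes = [
    helper.make_node('Cast', ['x'], ['x32'], to=TensorProto.INT32),
    helper.make_node('Cast', ['y'], ['y32'], to=TensorProto.INT32),
    helper.make_node('Add', ['x32', 'y32'], ['sum32']),
    helper.make_node('Cast', ['sum32'], ['sum'], to=TensorProto.UINT8),
]
graph = helper.make_graph(
    nodes, 'add_uint8_workaround',
    [helper.make_tensor_value_info('x', TensorProto.UINT8, [3, 4, 5]),
     helper.make_tensor_value_info('y', TensorProto.UINT8, [3, 4, 5])],
    [helper.make_tensor_value_info('sum', TensorProto.UINT8, [3, 4, 5])])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 14)])

>>>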
    ======================================================================
    ERROR: test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BitShift(11) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BitShift(11) node with name '''
    opset: domain='' version=11
    input: name='x' type=dtype('uint16') shape=[3]
    input: name='y' type=dtype('uint16') shape=[3]
    BitShift(x, y, direction=b'LEFT') -> z
    output: name='z' type=dtype('uint16') shape=[3].
    
    ======================================================================
    ERROR: test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BitShift(11) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BitShift(11) node with name '''
    opset: domain='' version=11
    input: name='x' type=dtype('uint16') shape=[3]
    input: name='y' type=dtype('uint16') shape=[3]
    BitShift(x, y, direction=b'RIGHT') -> z
    output: name='z' type=dtype('uint16') shape=[3].
    
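The two `BitShift` failures above belong to the same class: the operator schema allows `uint16`, but this onnxruntime version appears to ship kernels only for a subset of the unsigned types, so kernel lookup fails. The expected semantics are those of NumPy's shifts, for instance:

<<<

import numpy as np

x = np.array([16, 4, 1], dtype=np.uint16)
y = np.array([1, 2, 3], dtype=np.uint16)

print(np.left_shift(x, y))    # [32 16  8]
print(np.right_shift(x, y))   # [8 1 0]

>>>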
    ======================================================================
    ERROR: test_bitwise_and_i16_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int16') shape=[3, 4, 5]
    input: name='y' type=dtype('int16') shape=[3, 4, 5]
    BitwiseAnd(x, y) -> bitwiseand
    output: name='bitwiseand' type=dtype('int16') shape=[3, 4, 5].
    
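The long run of `Bitwise*` failures that starts here is a third failure class: these node tests are stamped with opset 18, which onnx 1.13.0 defines but onnxruntime 1.13.1 only officially supports up to opset 17 for the `ai.onnx` domain, so the models are rejected at load time (`INVALID_ARGUMENT`) before any kernel lookup happens. Downgrading the models is not an option either, since the bitwise operators were only introduced in opset 18. A small, hypothetical guard that skips such models up front could check the declared opset imports:

<<<

import onnx

# ONNX Runtime 1.13.1 guarantees ai.onnx support up to opset 17
# (taken from the error message above).
MAX_SUPPORTED_OPSET = 17

def loadable_by_ort(model_path):
    """Return True when every ai.onnx opset import is officially supported."""
    model = onnx.load(model_path)
    return all(op.version <= MAX_SUPPORTED_OPSET
               for op in model.opset_import
               if op.domain in ('', 'ai.onnx'))

>>>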
    ======================================================================
    ERROR: test_bitwise_and_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[3, 4]
    input: name='y' type=dtype('int32') shape=[3, 4]
    BitwiseAnd(x, y) -> bitwiseand
    output: name='bitwiseand' type=dtype('int32') shape=[3, 4].
    
    ======================================================================
    ERROR: test_bitwise_and_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint64') shape=[3, 4, 5]
    input: name='y' type=dtype('uint64') shape=[5]
    BitwiseAnd(x, y) -> bitwiseand
    output: name='bitwiseand' type=dtype('uint64') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_bitwise_and_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint8') shape=[3, 4, 5, 6]
    input: name='y' type=dtype('uint8') shape=[4, 5, 6]
    BitwiseAnd(x, y) -> bitwiseand
    output: name='bitwiseand' type=dtype('uint8') shape=[3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_bitwise_not_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[3, 4]
    BitwiseNot(x) -> bitwise_not
    output: name='bitwise_not' type=dtype('int32') shape=[3, 4].
    
    ======================================================================
    ERROR: test_bitwise_not_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint16') shape=[3, 4, 5]
    BitwiseNot(x) -> bitwise_not
    output: name='bitwise_not' type=dtype('uint16') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_bitwise_not_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint8') shape=[3, 4, 5, 6]
    BitwiseNot(x) -> bitwise_not
    output: name='bitwise_not' type=dtype('uint8') shape=[3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_bitwise_or_i16_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int8') shape=[3, 4, 5, 6]
    input: name='y' type=dtype('int8') shape=[3, 4, 5, 6]
    BitwiseOr(x, y) -> bitwiseor
    output: name='bitwiseor' type=dtype('int8') shape=[3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_bitwise_or_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[3, 4]
    input: name='y' type=dtype('int32') shape=[3, 4]
    BitwiseOr(x, y) -> bitwiseor
    output: name='bitwiseor' type=dtype('int32') shape=[3, 4].
    
    ======================================================================
    ERROR: test_bitwise_or_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint64') shape=[3, 4, 5]
    input: name='y' type=dtype('uint64') shape=[5]
    BitwiseOr(x, y) -> bitwiseor
    output: name='bitwiseor' type=dtype('uint64') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_bitwise_or_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint8') shape=[3, 4, 5, 6]
    input: name='y' type=dtype('uint8') shape=[4, 5, 6]
    BitwiseOr(x, y) -> bitwiseor
    output: name='bitwiseor' type=dtype('uint8') shape=[3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_bitwise_xor_i16_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int16') shape=[3, 4, 5]
    input: name='y' type=dtype('int16') shape=[3, 4, 5]
    BitwiseXor(x, y) -> bitwisexor
    output: name='bitwisexor' type=dtype('int16') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_bitwise_xor_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[3, 4]
    input: name='y' type=dtype('int32') shape=[3, 4]
    BitwiseXor(x, y) -> bitwisexor
    output: name='bitwisexor' type=dtype('int32') shape=[3, 4].
    
    ======================================================================
    ERROR: test_bitwise_xor_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint64') shape=[3, 4, 5]
    input: name='y' type=dtype('uint64') shape=[5]
    BitwiseXor(x, y) -> bitwisexor
    output: name='bitwisexor' type=dtype('uint64') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_bitwise_xor_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('uint8') shape=[3, 4, 5, 6]
    input: name='y' type=dtype('uint8') shape=[4, 5, 6]
    BitwiseXor(x, y) -> bitwisexor
    output: name='bitwisexor' type=dtype('uint8') shape=[3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
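
This second family of failures never reaches onnxruntime: mlprodict's own graph parsing raises first. elem_type 16 is TensorProto.BFLOAT16, which the helper _elem_type_as_str in onnx2py_helper.py does not map yet, so every Cast and CastLike test involving BFLOAT16 fails the same way (at line 619 of onnx_inference.py when a graph input is BFLOAT16, at line 629 when the output is). The protobuf enum supplies the missing name directly, as this illustrative sketch shows:

<<<

# A minimal sketch: elem_type 16 is simply the BFLOAT16 entry of the
# TensorProto.DataType enum, so the missing name can be recovered
# without any extra mapping table.
from onnx import TensorProto

print(TensorProto.DataType.Name(16))   # -> 'BFLOAT16'
print(TensorProto.BFLOAT16 == 16)      # -> True

>>>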
    
    ======================================================================
    ERROR: test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 629, in to_sequence
        outputs[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    (same dir(int) field list as in the first BFLOAT16 error above, elided)
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    (same dir(int) field list as in the first BFLOAT16 error above, elided)
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    (same dir(int) field list as in the first BFLOAT16 error above, elided)
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    (same dir(int) field list as in the first BFLOAT16 error above, elided)
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    (same dir(int) field list as in the first BFLOAT16 error above, elided)
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_and_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 8, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    CenterCropPad(x, shape) -> y
    output: name='y' type=dtype('float32') shape=[10, 10, 3].
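
Every ``test_center_crop_pad_*`` error above (and the identical ones that follow) has the same root cause: the test models are stamped with ``ai.onnx`` opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17, so ``InferenceSession`` refuses to load the model before any node is executed. A minimal sketch of a guard that detects such models up front (the constant and helper name are illustrative, not part of mlprodict):

<<<

from onnx import load_model

# Assumption: onnxruntime 1.13.1 officially supports ai.onnx up to
# opset 17, as the InvalidArgument message above states.
ORT_MAX_ONNX_OPSET = 17

def loadable_by_ort(model_path):
    # True if every default-domain opset import stays within the
    # range onnxruntime guarantees to load.
    model = load_model(model_path)
    return all(op.version <= ORT_MAX_ONNX_OPSET
               for op in model.opset_import
               if op.domain in ('', 'ai.onnx'))

>>>
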
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_and_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 8, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    Constant(value=[2]) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_k2
    Shape(x) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_x_shape
      Max(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_x_shape, shape) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_padded_sh
      Sub(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_padded_sh, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_x_shape) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount
      Div(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount_left
        Sub(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount_left) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount_right
        Concat(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount_left, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pad_amount_right, axis=0) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pads
          Pad(x, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_pads) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_padded_input
            Shape(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_padded_input) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_x_shape2
              Sub(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_x_shape2, shape) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_sh_diff
      Div(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_sh_diff, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_start_dims
        Add(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_start_dims, shape) -> CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_end_dims
        Slice(CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_padded_input, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_start_dims, CenterCropPad_test_center_crop_pad_crop_and_pad_expanded_function_end_dims) -> y
    output: name='y' type=dtype('float32') shape=[10, 10, 3].
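
The ``*_expanded`` variants replace the ``CenterCropPad`` node with its function body, visible in the graph dump above: pad every dimension symmetrically up to the target size, then slice the centered window back out. A minimal numpy sketch of that pad-then-slice decomposition (the helper name is illustrative):

<<<

import numpy as np

def center_crop_pad(x, shape):
    # pad-then-slice, mirroring the expanded graph above
    x_shape = np.array(x.shape)
    shape = np.asarray(shape)
    pad_amount = np.maximum(x_shape, shape) - x_shape  # Max, Sub
    left = pad_amount // 2                             # Div by k2
    right = pad_amount - left                          # Sub
    padded = np.pad(x, list(zip(left, right)))         # Concat, Pad
    start = (np.array(padded.shape) - shape) // 2      # Shape, Sub, Div
    end = start + shape                                # Add
    return padded[tuple(slice(int(s), int(e))          # Slice
                        for s, e in zip(start, end))]

x = np.random.rand(20, 8, 3).astype(np.float32)
assert center_crop_pad(x, [10, 10, 3]).shape == (10, 10, 3)

>>>
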
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_chw_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 20, 8]
    input: name='shape' type=dtype('int64') shape=[2]
    CenterCropPad(x, shape, axes=[1,2]) -> y
    output: name='y' type=dtype('float32') shape=[3, 10, 9].
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_chw_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 20, 8]
    input: name='shape' type=dtype('int64') shape=[2]
    Constant(value=[2]) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_k2
    Constant(value_ints=[1,2]) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_axes_input
    Shape(x) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape_alldims
      Gather(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape_alldims, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape
        Max(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_padded_sh
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_padded_sh, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount
      Div(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount_left
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount_left) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount_right
        Concat(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount_left, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pad_amount_right, axis=0) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pads
    Pad(x, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_pads, , CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_padded_input
      Shape(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_padded_input) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape_alldims2
      Gather(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape_alldims2, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape2
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_x_shape2, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_sh_diff
      Div(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_sh_diff, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_start_dims
        Add(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_start_dims, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_end_dims
      Slice(CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_padded_input, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_start_dims, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_end_dims, CenterCropPad_test_center_crop_pad_crop_axes_chw_expanded_function_axes_input) -> y
    output: name='y' type=dtype('float32') shape=[3, 10, 9].
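
When the ``axes`` attribute is set, the expanded function only touches the selected dimensions: it ``Gather``s those entries of the shape and passes ``axes`` to both ``Pad`` and ``Slice`` so the remaining dimensions are left untouched. The same logic in numpy, under the same caveats as the sketch above:

<<<

import numpy as np

def center_crop_pad_axes(x, shape, axes):
    shape = np.asarray(shape)
    sub = np.array(x.shape)[list(axes)]        # Gather(x_shape_alldims, axes)
    pad_amount = np.maximum(sub, shape) - sub
    left = pad_amount // 2
    right = pad_amount - left
    pad_width = [(0, 0)] * x.ndim
    for a, l, r in zip(axes, left, right):
        pad_width[a] = (int(l), int(r))        # Pad(..., axes)
    padded = np.pad(x, pad_width)
    start = (np.array(padded.shape)[list(axes)] - shape) // 2
    sl = [slice(None)] * x.ndim
    for a, s, e in zip(axes, start, start + shape):
        sl[a] = slice(int(s), int(e))          # Slice(..., axes)
    return padded[tuple(sl)]

x = np.random.rand(3, 20, 8).astype(np.float32)
assert center_crop_pad_axes(x, [10, 9], axes=[1, 2]).shape == (3, 10, 9)

>>>
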
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_hwc_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 8, 3]
    input: name='shape' type=dtype('int64') shape=[2]
    CenterCropPad(x, shape, axes=[0,1]) -> y
    output: name='y' type=dtype('float32') shape=[10, 9, 3].
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_hwc_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 8, 3]
    input: name='shape' type=dtype('int64') shape=[2]
    Constant(value=[2]) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_k2
    Constant(value_ints=[0,1]) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_axes_input
    Shape(x) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape_alldims
      Gather(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape_alldims, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape
        Max(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_padded_sh
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_padded_sh, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount
      Div(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount_left
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount_left) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount_right
        Concat(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount_left, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pad_amount_right, axis=0) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pads
    Pad(x, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_pads, , CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_padded_input
      Shape(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_padded_input) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape_alldims2
      Gather(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape_alldims2, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_axes_input) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape2
        Sub(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_x_shape2, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_sh_diff
      Div(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_sh_diff, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_start_dims
        Add(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_start_dims, shape) -> CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_end_dims
      Slice(CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_padded_input, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_start_dims, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_end_dims, CenterCropPad_test_center_crop_pad_crop_axes_hwc_expanded_function_axes_input) -> y
    output: name='y' type=dtype('float32') shape=[10, 9, 3].
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 10, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    CenterCropPad(x, shape) -> y
    output: name='y' type=dtype('float32') shape=[10, 7, 3].
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[20, 10, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    Constant(value=[2]) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_k2
    Shape(x) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_x_shape
      Max(CenterCropPad_test_center_crop_pad_crop_expanded_function_x_shape, shape) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_padded_sh
      Sub(CenterCropPad_test_center_crop_pad_crop_expanded_function_padded_sh, CenterCropPad_test_center_crop_pad_crop_expanded_function_x_shape) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount
      Div(CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount_left
        Sub(CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount_left) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount_right
        Concat(CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount_left, CenterCropPad_test_center_crop_pad_crop_expanded_function_pad_amount_right, axis=0) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_pads
          Pad(x, CenterCropPad_test_center_crop_pad_crop_expanded_function_pads) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_padded_input
            Shape(CenterCropPad_test_center_crop_pad_crop_expanded_function_padded_input) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_x_shape2
              Sub(CenterCropPad_test_center_crop_pad_crop_expanded_function_x_shape2, shape) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_sh_diff
      Div(CenterCropPad_test_center_crop_pad_crop_expanded_function_sh_diff, CenterCropPad_test_center_crop_pad_crop_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_start_dims
        Add(CenterCropPad_test_center_crop_pad_crop_expanded_function_start_dims, shape) -> CenterCropPad_test_center_crop_pad_crop_expanded_function_end_dims
        Slice(CenterCropPad_test_center_crop_pad_crop_expanded_function_padded_input, CenterCropPad_test_center_crop_pad_crop_expanded_function_start_dims, CenterCropPad_test_center_crop_pad_crop_expanded_function_end_dims) -> y
    output: name='y' type=dtype('float32') shape=[10, 7, 3].
    
    ======================================================================
    ERROR: test_center_crop_pad_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[10, 7, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    CenterCropPad(x, shape) -> y
    output: name='y' type=dtype('float32') shape=[20, 10, 3].
    
    ======================================================================
    ERROR: test_center_crop_pad_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[10, 7, 3]
    input: name='shape' type=dtype('int64') shape=[3]
    Constant(value=[2]) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_k2
    Shape(x) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_x_shape
      Max(CenterCropPad_test_center_crop_pad_pad_expanded_function_x_shape, shape) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_padded_sh
      Sub(CenterCropPad_test_center_crop_pad_pad_expanded_function_padded_sh, CenterCropPad_test_center_crop_pad_pad_expanded_function_x_shape) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount
      Div(CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_pad_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount_left
        Sub(CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount, CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount_left) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount_right
        Concat(CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount_left, CenterCropPad_test_center_crop_pad_pad_expanded_function_pad_amount_right, axis=0) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_pads
          Pad(x, CenterCropPad_test_center_crop_pad_pad_expanded_function_pads) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_padded_input
            Shape(CenterCropPad_test_center_crop_pad_pad_expanded_function_padded_input) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_x_shape2
              Sub(CenterCropPad_test_center_crop_pad_pad_expanded_function_x_shape2, shape) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_sh_diff
      Div(CenterCropPad_test_center_crop_pad_pad_expanded_function_sh_diff, CenterCropPad_test_center_crop_pad_pad_expanded_function_k2) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_start_dims
        Add(CenterCropPad_test_center_crop_pad_pad_expanded_function_start_dims, shape) -> CenterCropPad_test_center_crop_pad_pad_expanded_function_end_dims
        Slice(CenterCropPad_test_center_crop_pad_pad_expanded_function_padded_input, CenterCropPad_test_center_crop_pad_pad_expanded_function_start_dims, CenterCropPad_test_center_crop_pad_pad_expanded_function_end_dims) -> y
    output: name='y' type=dtype('float32') shape=[20, 10, 3].
    
    ======================================================================
    ERROR: test_clip_default_int8_max_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Less(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Less(13) node with name '''
    opset: domain='' version=13
    input: name='x' type=dtype('int8') shape=[3, 4, 5]
    input: name='max' type=dtype('int8') shape=[]
    Less(max, x) -> Clip_test_clip_default_int8_max_expanded_function_input_large_than_max
      Where(Clip_test_clip_default_int8_max_expanded_function_input_large_than_max, max, x) -> y
    output: name='y' type=dtype('int8') shape=[3, 4, 5].
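
The expansion rewrites Clip as Less followed by Where, and onnxruntime 1.13.1 ships no int8 kernel for Less, so the session is rejected before the test even runs. A minimal repro sketch independent of mlprodict (graph and tensor names are made up):

<<<

import onnx
import onnx.helper as oh
import onnxruntime as ort

# One Less node on int8 inputs, stamped opset 13 like the failing model.
x = oh.make_tensor_value_info("x", onnx.TensorProto.INT8, [3])
m = oh.make_tensor_value_info("m", onnx.TensorProto.INT8, [])
y = oh.make_tensor_value_info("y", onnx.TensorProto.BOOL, [3])
graph = oh.make_graph([oh.make_node("Less", ["m", "x"], ["y"])],
                      "less_int8", [x, m], [y])
model = oh.make_model(graph, opset_imports=[oh.make_opsetid("", 13)])

try:
    ort.InferenceSession(model.SerializeToString(),
                         providers=["CPUExecutionProvider"])
except Exception as e:  # expected: NOT_IMPLEMENTED for Less(13) on int8
    print(type(e).__name__, e)

>>>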
    
    ======================================================================
    ERROR: test_clip_default_int8_min_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Less(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Less(13) node with name '''
    opset: domain='' version=13
    input: name='x' type=dtype('int8') shape=[3, 4, 5]
    input: name='min' type=dtype('int8') shape=[]
    Less(x, min) -> Clip_test_clip_default_int8_min_expanded_function_input_less_than_min
      Where(Clip_test_clip_default_int8_min_expanded_function_input_less_than_min, min, x) -> y
    output: name='y' type=dtype('int8') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_col2im_5d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[1, 10, 12]
    input: name='image_shape' type=dtype('int64') shape=[3]
    input: name='block_shape' type=dtype('int64') shape=[3]
    Col2Im(input, image_shape, block_shape) -> output
    output: name='output' type=dtype('float32') shape=[1, 2, 3, 4, 5].
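
All the Col2Im failures that follow share a single cause: the models are stamped with ai.onnx opset 18 while onnxruntime 1.13.1 only guarantees opsets up to 17, so loading fails before any kernel is looked up. A quick way to check what a model is stamped with (the path below is hypothetical):

<<<

import onnx

model = onnx.load("test_col2im/model.onnx")  # hypothetical path
for imp in model.opset_import:
    print(imp.domain or "ai.onnx", imp.version)
# onnx.version_converter.convert_version(model, 17) can sometimes downgrade
# the stamp, but Col2Im only exists from opset 18 on, so it does not help here.

>>>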
    
    ======================================================================
    ERROR: test_col2im_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[1, 5, 5]
    input: name='image_shape' type=dtype('int64') shape=[2]
    input: name='block_shape' type=dtype('int64') shape=[2]
    Col2Im(input, image_shape, block_shape) -> output
    output: name='output' type=dtype('float32') shape=[1, 1, 5, 5].
    
    ======================================================================
    ERROR: test_col2im_dilations_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[1, 4, 5]
    input: name='image_shape' type=dtype('int64') shape=[2]
    input: name='block_shape' type=dtype('int64') shape=[2]
    Col2Im(input, image_shape, block_shape, dilations=[1,5]) -> output
    output: name='output' type=dtype('float32') shape=[1, 1, 6, 6].
    
    ======================================================================
    ERROR: test_col2im_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[1, 5, 15]
    input: name='image_shape' type=dtype('int64') shape=[2]
    input: name='block_shape' type=dtype('int64') shape=[2]
    Col2Im(input, image_shape, block_shape, pads=[0,1,0,1]) -> output
    output: name='output' type=dtype('float32') shape=[1, 1, 5, 5].
    
    ======================================================================
    ERROR: test_col2im_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[1, 9, 4]
    input: name='image_shape' type=dtype('int64') shape=[2]
    input: name='block_shape' type=dtype('int64') shape=[2]
    Col2Im(input, image_shape, block_shape, strides=[2,2]) -> output
    output: name='output' type=dtype('float32') shape=[1, 1, 5, 5].
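
For reference, what these tests expect Col2Im to do: scatter the columns back into an image, summing overlapping blocks (the ONNX definition). A minimal NumPy sketch for the 2-D case with unit strides, no pads and no dilations:

<<<

import numpy as np

def col2im_2d(cols, image_shape, block_shape):
    # cols: (N, C * prod(block_shape), L), blocks enumerated row-major.
    n, ckk, l = cols.shape
    c = ckk // (block_shape[0] * block_shape[1])
    out = np.zeros((n, c) + tuple(image_shape), dtype=cols.dtype)
    positions = [(i, j)
                 for i in range(image_shape[0] - block_shape[0] + 1)
                 for j in range(image_shape[1] - block_shape[1] + 1)]
    assert len(positions) == l
    for idx, (i, j) in enumerate(positions):
        patch = cols[:, :, idx].reshape(n, c, block_shape[0], block_shape[1])
        out[:, :, i:i + block_shape[0], j:j + block_shape[1]] += patch
    return out

# test_col2im_cpu's shapes: (1, 5, 5) columns -> (1, 1, 5, 5) image.
cols = np.arange(25, dtype=np.float32).reshape(1, 5, 5)
print(col2im_2d(cols, (5, 5), (1, 5)).shape)

>>>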
    
    ======================================================================
    ERROR: test_constant_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 107, in run
        v = next(iter(inputs.values()))
    StopIteration
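
This one fails differently: the model is a single Constant node, so it has no inputs, the feed dictionary is empty, and next(iter(inputs.values())) inside the backend raises StopIteration. The usual way to avoid it is a default value (a sketch of the pattern, not the actual mlprodict fix):

<<<

inputs = {}  # a Constant-only model receives no feeds
v = next(iter(inputs.values()), None)  # default instead of StopIteration
if v is None:
    print("no inputs: skip input-based probing")

>>>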
    
    ======================================================================
    ERROR: test_constant_pad_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[1, 3, 4, 5]
    input: name='pads' type=dtype('int64') shape=[4]
    input: name='value' type=dtype('float32') shape=[]
    input: name='axes' type=dtype('int64') shape=[2]
    Pad(x, pads, value, axes, mode=b'constant') -> y
    output: name='y' type=dtype('float32') shape=[1, 3, 4, 12].
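
Besides the opset-18 stamp, this test exercises the optional axes input added to Pad in opset 18: pads then only lists begin/end values for the named axes. A NumPy sketch of that semantics, with values chosen to reproduce the declared output shape (the test's actual feeds are not shown):

<<<

import numpy as np

def pad_with_axes(x, pads, value, axes):
    # pads = [begin_0, ..., begin_{k-1}, end_0, ..., end_{k-1}] for k axes.
    k = len(axes)
    width = [(0, 0)] * x.ndim
    for i, ax in enumerate(axes):
        width[ax] = (pads[i], pads[i + k])
    return np.pad(x, width, mode="constant", constant_values=value)

x = np.zeros((1, 3, 4, 5), dtype=np.float32)
print(pad_with_axes(x, [0, 3, 0, 4], 1.0, [1, 3]).shape)  # (1, 3, 4, 12)

>>>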
    
    ======================================================================
    ERROR: test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[1, 3, 4, 5]
    input: name='pads' type=dtype('int64') shape=[8]
    input: name='value' type=dtype('float32') shape=[]
    Pad(x, pads, value, mode=b'constant') -> y
    output: name='y' type=dtype('float32') shape=[1, 3, 7, 12].
    
    ======================================================================
    ERROR: test_div_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Div(14) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Div(14) node with name '''
    opset: domain='' version=14
    input: name='x' type=dtype('uint8') shape=[3, 4, 5]
    input: name='y' type=dtype('uint8') shape=[3, 4, 5]
    Div(x, y) -> z
    output: name='z' type=dtype('uint8') shape=[3, 4, 5].
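
Here the opset is 14, which onnxruntime supports, but there is simply no uint8 kernel for Div. ONNX Div on integer tensors is integer division, so a common workaround is to compute in a wider type and cast back (a sketch, not what mlprodict does):

<<<

import numpy as np

x = np.array([[8, 9], [200, 7]], dtype=np.uint8)
y = np.array([[3, 3], [10, 2]], dtype=np.uint8)
z = (x.astype(np.int32) // y.astype(np.int32)).astype(np.uint8)
print(z)  # [[ 2  3], [20  3]]

>>>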
    
    ======================================================================
    ERROR: test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[1, 3, 4, 5]
    input: name='pads' type=dtype('int64') shape=[8]
    Pad(x, pads, mode=b'edge') -> y
    output: name='y' type=dtype('int32') shape=[1, 3, 6, 7].
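
Again only the opset-18 stamp is at fault; the operation itself is plain edge padding. Its NumPy equivalent, with pad values picked to match the declared [1, 3, 6, 7] output (the test's actual pads input is not shown):

<<<

import numpy as np

x = np.arange(60, dtype=np.int32).reshape(1, 3, 4, 5)
y = np.pad(x, [(0, 0), (0, 0), (1, 1), (1, 1)], mode="edge")
print(y.shape)  # (1, 3, 6, 7)

>>>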
    
    ======================================================================
    ERROR: test_elu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=1.0) -> Elu_test_elu_default_expanded_function_Alpha
      CastLike(Elu_test_elu_default_expanded_function_Alpha, x) -> Elu_test_elu_default_expanded_function_AlphaCast
    Constant(value=0.0) -> Elu_test_elu_default_expanded_function_Zero
      CastLike(Elu_test_elu_default_expanded_function_Zero, x) -> Elu_test_elu_default_expanded_function_ZeroCast
        Less(x, Elu_test_elu_default_expanded_function_ZeroCast) -> Elu_test_elu_default_expanded_function_XLessThanZero
    Constant(value=1.0) -> Elu_test_elu_default_expanded_function_One
      CastLike(Elu_test_elu_default_expanded_function_One, x) -> Elu_test_elu_default_expanded_function_OneCast
    Exp(x) -> Elu_test_elu_default_expanded_function_ExpX
      Sub(Elu_test_elu_default_expanded_function_ExpX, Elu_test_elu_default_expanded_function_OneCast) -> Elu_test_elu_default_expanded_function_ExpXSubOne
        Mul(Elu_test_elu_default_expanded_function_AlphaCast, Elu_test_elu_default_expanded_function_ExpXSubOne) -> Elu_test_elu_default_expanded_function_AlphaMulExpXSubOne
          Where(Elu_test_elu_default_expanded_function_XLessThanZero, Elu_test_elu_default_expanded_function_AlphaMulExpXSubOne, x) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
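
The dump above is the full opset-18 expansion of Elu into Constant, CastLike, Less, Exp, Sub, Mul and Where; the failure is again only the opset stamp. The graph computes exactly the textbook formula:

<<<

import numpy as np

def elu(x, alpha=1.0):
    # y = where(x < 0, alpha * (exp(x) - 1), x), as in the expanded graph.
    return np.where(x < 0, alpha * (np.exp(x) - 1.0), x).astype(x.dtype)

print(elu(np.array([-1.0, 0.0, 2.0], dtype=np.float32)))

>>>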
    
    ======================================================================
    ERROR: test_elu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3]
    Constant(value_float=2.0) -> Elu_test_elu_example_expanded_function_Alpha
      CastLike(Elu_test_elu_example_expanded_function_Alpha, x) -> Elu_test_elu_example_expanded_function_AlphaCast
    Constant(value=0.0) -> Elu_test_elu_example_expanded_function_Zero
      CastLike(Elu_test_elu_example_expanded_function_Zero, x) -> Elu_test_elu_example_expanded_function_ZeroCast
        Less(x, Elu_test_elu_example_expanded_function_ZeroCast) -> Elu_test_elu_example_expanded_function_XLessThanZero
    Constant(value=1.0) -> Elu_test_elu_example_expanded_function_One
      CastLike(Elu_test_elu_example_expanded_function_One, x) -> Elu_test_elu_example_expanded_function_OneCast
    Exp(x) -> Elu_test_elu_example_expanded_function_ExpX
      Sub(Elu_test_elu_example_expanded_function_ExpX, Elu_test_elu_example_expanded_function_OneCast) -> Elu_test_elu_example_expanded_function_ExpXSubOne
        Mul(Elu_test_elu_example_expanded_function_AlphaCast, Elu_test_elu_example_expanded_function_ExpXSubOne) -> Elu_test_elu_example_expanded_function_AlphaMulExpXSubOne
          Where(Elu_test_elu_example_expanded_function_XLessThanZero, Elu_test_elu_example_expanded_function_AlphaMulExpXSubOne, x) -> y
    output: name='y' type=dtype('float32') shape=[3].
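
This failure, and most of the opset-18 failures below, share a single cause: the test models are stamped with `ai.onnx` opset 18, which onnxruntime 1.13.1 refuses to load because its official support stops at opset 17. A minimal sketch of how such models could be filtered out before a session is created (the file name is hypothetical; the threshold comes from the error message above):

<<<

from onnx import load

MAX_GUARANTEED_OPSET = 17  # per onnxruntime 1.13.1's error message


def ai_onnx_opset(model):
    # The default domain may be reported as '' or 'ai.onnx'.
    for opset in model.opset_import:
        if opset.domain in ('', 'ai.onnx'):
            return opset.version
    return None


model = load('model.onnx')  # hypothetical path
version = ai_onnx_opset(model)
if version is not None and version > MAX_GUARANTEED_OPSET:
    print(f'skipping: opset {version} > {MAX_GUARANTEED_OPSET}')

>>>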
    
    ======================================================================
    ERROR: test_elu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=2.0) -> Elu_test_elu_expanded_function_Alpha
      CastLike(Elu_test_elu_expanded_function_Alpha, x) -> Elu_test_elu_expanded_function_AlphaCast
    Constant(value=0.0) -> Elu_test_elu_expanded_function_Zero
      CastLike(Elu_test_elu_expanded_function_Zero, x) -> Elu_test_elu_expanded_function_ZeroCast
        Less(x, Elu_test_elu_expanded_function_ZeroCast) -> Elu_test_elu_expanded_function_XLessThanZero
    Constant(value=1.0) -> Elu_test_elu_expanded_function_One
      CastLike(Elu_test_elu_expanded_function_One, x) -> Elu_test_elu_expanded_function_OneCast
    Exp(x) -> Elu_test_elu_expanded_function_ExpX
      Sub(Elu_test_elu_expanded_function_ExpX, Elu_test_elu_expanded_function_OneCast) -> Elu_test_elu_expanded_function_ExpXSubOne
        Mul(Elu_test_elu_expanded_function_AlphaCast, Elu_test_elu_expanded_function_ExpXSubOne) -> Elu_test_elu_expanded_function_AlphaMulExpXSubOne
          Where(Elu_test_elu_expanded_function_XLessThanZero, Elu_test_elu_expanded_function_AlphaMulExpXSubOne, x) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
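
Note that the expanded Elu graphs above are just the Elu definition spelled out node by node. A minimal numpy restatement of what they compute (illustrative only, with alpha=2.0 as in the Constant node):

<<<

import numpy as np


def elu(x, alpha=2.0):
    # where(x < 0, alpha * (exp(x) - 1), x), matching the
    # Less/Exp/Sub/Mul/Where decomposition in the dumps above
    return np.where(x < 0, alpha * (np.exp(x) - 1.0), x)


x = np.array([-1.0, 0.0, 1.0], dtype=np.float32)
print(elu(x))

>>>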
    
    ======================================================================
    ERROR: test_group_normalization_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 2, 2]
    input: name='scale' type=dtype('float32') shape=[2]
    input: name='bias' type=dtype('float32') shape=[2]
    GroupNormalization(x, scale, bias, epsilon=0.01, num_groups=2) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 2, 2].
    
    ======================================================================
    ERROR: test_group_normalization_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 2, 2]
    input: name='scale' type=dtype('float32') shape=[2]
    input: name='bias' type=dtype('float32') shape=[2]
    Cast(scale, to=1) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleT
    Constant(value=[0.0099999...) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_FloatEpsilon
      Cast(GroupNormalization_test_group_normalization_epsilon_expanded_function_FloatEpsilon, to=1) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Epsilon
    Shape(x) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_XShape
    Shape(x, start=1, end=2) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_C
    Constant(value=[2]) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_NumGroups
      Div(GroupNormalization_test_group_normalization_epsilon_expanded_function_C, GroupNormalization_test_group_normalization_epsilon_expanded_function_NumGroups) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_GroupSize
    Shape(x, start=0, end=1) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_N
    Shape(x, start=2) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_InstanceShape
      Concat(GroupNormalization_test_group_normalization_epsilon_expanded_function_N, GroupNormalization_test_group_normalization_epsilon_expanded_function_NumGroups, GroupNormalization_test_group_normalization_epsilon_expanded_function_GroupSize, GroupNormalization_test_group_normalization_epsilon_expanded_function_InstanceShape, axis=0) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_NewShape
        Reshape(x, GroupNormalization_test_group_normalization_epsilon_expanded_function_NewShape) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_XReshaped
    Constant(value_ints=[0,0,-1]) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Shape3D
      Reshape(GroupNormalization_test_group_normalization_epsilon_expanded_function_XReshaped, GroupNormalization_test_group_normalization_epsilon_expanded_function_Shape3D) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_X3D
        Mul(GroupNormalization_test_group_normalization_epsilon_expanded_function_X3D, GroupNormalization_test_group_normalization_epsilon_expanded_function_X3D) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Square
    Constant(value=[2]) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Axes2
      ReduceMean(GroupNormalization_test_group_normalization_epsilon_expanded_function_X3D, GroupNormalization_test_group_normalization_epsilon_expanded_function_Axes2) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Mean
        Mul(GroupNormalization_test_group_normalization_epsilon_expanded_function_Mean, GroupNormalization_test_group_normalization_epsilon_expanded_function_Mean) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_SquareOfMean
      ReduceMean(GroupNormalization_test_group_normalization_epsilon_expanded_function_Square, GroupNormalization_test_group_normalization_epsilon_expanded_function_Axes2) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_MeanOfSquare
        Sub(GroupNormalization_test_group_normalization_epsilon_expanded_function_MeanOfSquare, GroupNormalization_test_group_normalization_epsilon_expanded_function_SquareOfMean) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Var
        Add(GroupNormalization_test_group_normalization_epsilon_expanded_function_Var, GroupNormalization_test_group_normalization_epsilon_expanded_function_Epsilon) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(GroupNormalization_test_group_normalization_epsilon_expanded_function_VarPlusEpsilon) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_StdDev
        Sub(GroupNormalization_test_group_normalization_epsilon_expanded_function_X3D, GroupNormalization_test_group_normalization_epsilon_expanded_function_Mean) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Deviation
          Div(GroupNormalization_test_group_normalization_epsilon_expanded_function_Deviation, GroupNormalization_test_group_normalization_epsilon_expanded_function_StdDev) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Normalized
    Constant(value_ints=[1,-1,1]) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleShape
      Reshape(GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleT, GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleShape) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleReshaped
        Mul(GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleReshaped, GroupNormalization_test_group_normalization_epsilon_expanded_function_Normalized) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Scaled
    Cast(bias, to=1) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_BiasT
      Reshape(GroupNormalization_test_group_normalization_epsilon_expanded_function_BiasT, GroupNormalization_test_group_normalization_epsilon_expanded_function_ScaleShape) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_BiasReshaped
        Add(GroupNormalization_test_group_normalization_epsilon_expanded_function_Scaled, GroupNormalization_test_group_normalization_epsilon_expanded_function_BiasReshaped) -> GroupNormalization_test_group_normalization_epsilon_expanded_function_Biased
      Reshape(GroupNormalization_test_group_normalization_epsilon_expanded_function_Biased, GroupNormalization_test_group_normalization_epsilon_expanded_function_XShape) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 2, 2].
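
The expanded graph above makes the computation explicit: reshape the input to (N, num_groups, -1), normalize each group using Var = E[x*x] - E[x]**2, then apply one scale and bias value per group and reshape back. A minimal numpy restatement (illustrative, mirroring the dump rather than an official reference implementation):

<<<

import numpy as np


def group_normalization(x, scale, bias, num_groups=2, epsilon=0.01):
    n = x.shape[0]
    x3d = x.reshape(n, num_groups, -1)          # (N, num_groups, rest)
    mean = x3d.mean(axis=2, keepdims=True)
    var = (x3d * x3d).mean(axis=2, keepdims=True) - mean * mean
    normalized = (x3d - mean) / np.sqrt(var + epsilon)
    # scale and bias hold one value per group (shape [num_groups]),
    # broadcast as [1, -1, 1] exactly like ScaleShape in the dump
    y = scale.reshape(1, -1, 1) * normalized + bias.reshape(1, -1, 1)
    return y.reshape(x.shape)


x = np.random.randn(3, 4, 2, 2).astype(np.float32)
scale = np.array([1.0, 1.5], dtype=np.float32)
bias = np.array([0.0, 0.5], dtype=np.float32)
print(group_normalization(x, scale, bias).shape)   # (3, 4, 2, 2)

>>>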
    
    ======================================================================
    ERROR: test_group_normalization_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 2, 2]
    input: name='scale' type=dtype('float32') shape=[2]
    input: name='bias' type=dtype('float32') shape=[2]
    GroupNormalization(x, scale, bias, num_groups=2) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 2, 2].
    
    ======================================================================
    ERROR: test_group_normalization_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 2, 2]
    input: name='scale' type=dtype('float32') shape=[2]
    input: name='bias' type=dtype('float32') shape=[2]
    Cast(scale, to=1) -> GroupNormalization_test_group_normalization_example_expanded_function_ScaleT
    Constant(value=[9.9999997...) -> GroupNormalization_test_group_normalization_example_expanded_function_FloatEpsilon
      Cast(GroupNormalization_test_group_normalization_example_expanded_function_FloatEpsilon, to=1) -> GroupNormalization_test_group_normalization_example_expanded_function_Epsilon
    Shape(x) -> GroupNormalization_test_group_normalization_example_expanded_function_XShape
    Shape(x, start=1, end=2) -> GroupNormalization_test_group_normalization_example_expanded_function_C
    Constant(value=[2]) -> GroupNormalization_test_group_normalization_example_expanded_function_NumGroups
      Div(GroupNormalization_test_group_normalization_example_expanded_function_C, GroupNormalization_test_group_normalization_example_expanded_function_NumGroups) -> GroupNormalization_test_group_normalization_example_expanded_function_GroupSize
    Shape(x, start=0, end=1) -> GroupNormalization_test_group_normalization_example_expanded_function_N
    Shape(x, start=2) -> GroupNormalization_test_group_normalization_example_expanded_function_InstanceShape
      Concat(GroupNormalization_test_group_normalization_example_expanded_function_N, GroupNormalization_test_group_normalization_example_expanded_function_NumGroups, GroupNormalization_test_group_normalization_example_expanded_function_GroupSize, GroupNormalization_test_group_normalization_example_expanded_function_InstanceShape, axis=0) -> GroupNormalization_test_group_normalization_example_expanded_function_NewShape
        Reshape(x, GroupNormalization_test_group_normalization_example_expanded_function_NewShape) -> GroupNormalization_test_group_normalization_example_expanded_function_XReshaped
    Constant(value_ints=[0,0,-1]) -> GroupNormalization_test_group_normalization_example_expanded_function_Shape3D
      Reshape(GroupNormalization_test_group_normalization_example_expanded_function_XReshaped, GroupNormalization_test_group_normalization_example_expanded_function_Shape3D) -> GroupNormalization_test_group_normalization_example_expanded_function_X3D
        Mul(GroupNormalization_test_group_normalization_example_expanded_function_X3D, GroupNormalization_test_group_normalization_example_expanded_function_X3D) -> GroupNormalization_test_group_normalization_example_expanded_function_Square
    Constant(value=[2]) -> GroupNormalization_test_group_normalization_example_expanded_function_Axes2
      ReduceMean(GroupNormalization_test_group_normalization_example_expanded_function_X3D, GroupNormalization_test_group_normalization_example_expanded_function_Axes2) -> GroupNormalization_test_group_normalization_example_expanded_function_Mean
        Mul(GroupNormalization_test_group_normalization_example_expanded_function_Mean, GroupNormalization_test_group_normalization_example_expanded_function_Mean) -> GroupNormalization_test_group_normalization_example_expanded_function_SquareOfMean
      ReduceMean(GroupNormalization_test_group_normalization_example_expanded_function_Square, GroupNormalization_test_group_normalization_example_expanded_function_Axes2) -> GroupNormalization_test_group_normalization_example_expanded_function_MeanOfSquare
        Sub(GroupNormalization_test_group_normalization_example_expanded_function_MeanOfSquare, GroupNormalization_test_group_normalization_example_expanded_function_SquareOfMean) -> GroupNormalization_test_group_normalization_example_expanded_function_Var
        Add(GroupNormalization_test_group_normalization_example_expanded_function_Var, GroupNormalization_test_group_normalization_example_expanded_function_Epsilon) -> GroupNormalization_test_group_normalization_example_expanded_function_VarPlusEpsilon
          Sqrt(GroupNormalization_test_group_normalization_example_expanded_function_VarPlusEpsilon) -> GroupNormalization_test_group_normalization_example_expanded_function_StdDev
        Sub(GroupNormalization_test_group_normalization_example_expanded_function_X3D, GroupNormalization_test_group_normalization_example_expanded_function_Mean) -> GroupNormalization_test_group_normalization_example_expanded_function_Deviation
          Div(GroupNormalization_test_group_normalization_example_expanded_function_Deviation, GroupNormalization_test_group_normalization_example_expanded_function_StdDev) -> GroupNormalization_test_group_normalization_example_expanded_function_Normalized
    Constant(value_ints=[1,-1,1]) -> GroupNormalization_test_group_normalization_example_expanded_function_ScaleShape
      Reshape(GroupNormalization_test_group_normalization_example_expanded_function_ScaleT, GroupNormalization_test_group_normalization_example_expanded_function_ScaleShape) -> GroupNormalization_test_group_normalization_example_expanded_function_ScaleReshaped
        Mul(GroupNormalization_test_group_normalization_example_expanded_function_ScaleReshaped, GroupNormalization_test_group_normalization_example_expanded_function_Normalized) -> GroupNormalization_test_group_normalization_example_expanded_function_Scaled
    Cast(bias, to=1) -> GroupNormalization_test_group_normalization_example_expanded_function_BiasT
      Reshape(GroupNormalization_test_group_normalization_example_expanded_function_BiasT, GroupNormalization_test_group_normalization_example_expanded_function_ScaleShape) -> GroupNormalization_test_group_normalization_example_expanded_function_BiasReshaped
        Add(GroupNormalization_test_group_normalization_example_expanded_function_Scaled, GroupNormalization_test_group_normalization_example_expanded_function_BiasReshaped) -> GroupNormalization_test_group_normalization_example_expanded_function_Biased
      Reshape(GroupNormalization_test_group_normalization_example_expanded_function_Biased, GroupNormalization_test_group_normalization_example_expanded_function_XShape) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 2, 2].
    
    ======================================================================
    ERROR: test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.RuntimeException: [ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/deep_cpu_gru.h:55 onnxruntime::DeepCpuGruOp::DeepCpuGruOp(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/deep_cpu_gru.h:55 onnxruntime::DeepCpuGruOp::DeepCpuGruOp(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    '
    opset: domain='' version=14
    input: name='X' type=dtype('float32') shape=[3, 1, 2]
    input: name='W' type=dtype('float32') shape=[1, 18, 2]
    input: name='R' type=dtype('float32') shape=[1, 18, 6]
    GRU(X, W, R, hidden_size=6, layout=1) -> Y, Y_h
    output: name='Y' type=dtype('float32') shape=[3, 1, 1, 6]
    output: name='Y_h' type=dtype('float32') shape=[3, 1, 6].
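
This one fails for a different reason: onnxruntime's CPU GRU kernel only implements layout=0 (sequence-major), so a model using layout=1 is rejected at initialization. A minimal sketch of the usual workaround, transposing to the sequence-major layout before running (shapes follow the failing test; the GRU call itself is elided):

<<<

import numpy as np

# Hypothetical batch-major input matching the failing test:
# layout=1 means X is [batch_size, seq_length, input_size].
x_batchwise = np.random.randn(3, 1, 2).astype(np.float32)

# layout=0 expects [seq_length, batch_size, input_size] instead.
x_seq_major = x_batchwise.transpose(1, 0, 2)

# ... run the GRU with layout=0 on x_seq_major ...
# With layout=0, Y comes back as [seq, directions, batch, hidden];
# moving the batch axis to the front recovers the batchwise ordering.

>>>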
    
    ======================================================================
    ERROR: test_hardsigmoid_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=0.20000000298023224) -> HardSigmoid_test_hardsigmoid_default_expanded_function_Alpha
      CastLike(HardSigmoid_test_hardsigmoid_default_expanded_function_Alpha, x) -> HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaCast
        Mul(x, HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaCast) -> HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaMulX
    Constant(value_float=0.5) -> HardSigmoid_test_hardsigmoid_default_expanded_function_Beta
      CastLike(HardSigmoid_test_hardsigmoid_default_expanded_function_Beta, x) -> HardSigmoid_test_hardsigmoid_default_expanded_function_BetaCast
        Add(HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaMulX, HardSigmoid_test_hardsigmoid_default_expanded_function_BetaCast) -> HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaMulXAddBeta
    Constant(value=0.0) -> HardSigmoid_test_hardsigmoid_default_expanded_function_Zero
      CastLike(HardSigmoid_test_hardsigmoid_default_expanded_function_Zero, x) -> HardSigmoid_test_hardsigmoid_default_expanded_function_ZeroCast
    Constant(value=1.0) -> HardSigmoid_test_hardsigmoid_default_expanded_function_One
      CastLike(HardSigmoid_test_hardsigmoid_default_expanded_function_One, x) -> HardSigmoid_test_hardsigmoid_default_expanded_function_OneCast
        Min(HardSigmoid_test_hardsigmoid_default_expanded_function_AlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_default_expanded_function_OneCast) -> HardSigmoid_test_hardsigmoid_default_expanded_function_MinOneOrAlphaMulXAddBeta
        Max(HardSigmoid_test_hardsigmoid_default_expanded_function_MinOneOrAlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_default_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_hardsigmoid_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3]
    Constant(value_float=0.5) -> HardSigmoid_test_hardsigmoid_example_expanded_function_Alpha
      CastLike(HardSigmoid_test_hardsigmoid_example_expanded_function_Alpha, x) -> HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaCast
        Mul(x, HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaCast) -> HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaMulX
    Constant(value_float=0.6000000238418579) -> HardSigmoid_test_hardsigmoid_example_expanded_function_Beta
      CastLike(HardSigmoid_test_hardsigmoid_example_expanded_function_Beta, x) -> HardSigmoid_test_hardsigmoid_example_expanded_function_BetaCast
        Add(HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaMulX, HardSigmoid_test_hardsigmoid_example_expanded_function_BetaCast) -> HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaMulXAddBeta
    Constant(value=0.0) -> HardSigmoid_test_hardsigmoid_example_expanded_function_Zero
      CastLike(HardSigmoid_test_hardsigmoid_example_expanded_function_Zero, x) -> HardSigmoid_test_hardsigmoid_example_expanded_function_ZeroCast
    Constant(value=1.0) -> HardSigmoid_test_hardsigmoid_example_expanded_function_One
      CastLike(HardSigmoid_test_hardsigmoid_example_expanded_function_One, x) -> HardSigmoid_test_hardsigmoid_example_expanded_function_OneCast
        Min(HardSigmoid_test_hardsigmoid_example_expanded_function_AlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_example_expanded_function_OneCast) -> HardSigmoid_test_hardsigmoid_example_expanded_function_MinOneOrAlphaMulXAddBeta
        Max(HardSigmoid_test_hardsigmoid_example_expanded_function_MinOneOrAlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_example_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_hardsigmoid_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=0.5) -> HardSigmoid_test_hardsigmoid_expanded_function_Alpha
      CastLike(HardSigmoid_test_hardsigmoid_expanded_function_Alpha, x) -> HardSigmoid_test_hardsigmoid_expanded_function_AlphaCast
        Mul(x, HardSigmoid_test_hardsigmoid_expanded_function_AlphaCast) -> HardSigmoid_test_hardsigmoid_expanded_function_AlphaMulX
    Constant(value_float=0.6000000238418579) -> HardSigmoid_test_hardsigmoid_expanded_function_Beta
      CastLike(HardSigmoid_test_hardsigmoid_expanded_function_Beta, x) -> HardSigmoid_test_hardsigmoid_expanded_function_BetaCast
        Add(HardSigmoid_test_hardsigmoid_expanded_function_AlphaMulX, HardSigmoid_test_hardsigmoid_expanded_function_BetaCast) -> HardSigmoid_test_hardsigmoid_expanded_function_AlphaMulXAddBeta
    Constant(value=0.0) -> HardSigmoid_test_hardsigmoid_expanded_function_Zero
      CastLike(HardSigmoid_test_hardsigmoid_expanded_function_Zero, x) -> HardSigmoid_test_hardsigmoid_expanded_function_ZeroCast
    Constant(value=1.0) -> HardSigmoid_test_hardsigmoid_expanded_function_One
      CastLike(HardSigmoid_test_hardsigmoid_expanded_function_One, x) -> HardSigmoid_test_hardsigmoid_expanded_function_OneCast
        Min(HardSigmoid_test_hardsigmoid_expanded_function_AlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_expanded_function_OneCast) -> HardSigmoid_test_hardsigmoid_expanded_function_MinOneOrAlphaMulXAddBeta
        Max(HardSigmoid_test_hardsigmoid_expanded_function_MinOneOrAlphaMulXAddBeta, HardSigmoid_test_hardsigmoid_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
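Most failures in this report share the root cause shown above: the backend tests are generated with a model stamped `opset: domain='' version=18`, while the installed onnxruntime, as its own error message says, only guarantees support for `ai.onnx` up to opset 17 and rejects the model at load time. A minimal sketch of a workaround, assuming the graph only contains operators whose semantics did not change between opsets 17 and 18 (true for the trivial model below, not necessarily for every failing test), is to downgrade the declared opset with `onnx.version_converter` before creating the session:

<<<

import numpy as np
import onnx
from onnx import helper, TensorProto, version_converter
from onnxruntime import InferenceSession

# Build a tiny model deliberately stamped with opset 18; the graph itself
# (a single Identity node) is equally valid under opset 17.
X = helper.make_tensor_value_info('X', TensorProto.FLOAT, [2])
Y = helper.make_tensor_value_info('Y', TensorProto.FLOAT, [2])
graph = helper.make_graph(
    [helper.make_node('Identity', ['X'], ['Y'])], 'tiny', [X], [Y])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 18)])

# Downgrade the declared opset so this onnxruntime build accepts the model.
model17 = version_converter.convert_version(model, 17)
sess = InferenceSession(model17.SerializeToString(),
                        providers=['CPUExecutionProvider'])
print(sess.run(None, {'X': np.array([1, 2], dtype=np.float32)}))

>>>

`convert_version` raises when it has no adapter for a node, so this is a best-effort downgrade, not a general fix for the failures below.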
    ======================================================================
    ERROR: test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n      dim {\n        dim_value: 5\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
    
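test_identity_opt fails before onnxruntime is even involved: mlprodict's `_var_as_dict` cannot unfold the model's output type, `optional(sequence(tensor(float32, [5])))`, and falls through to the `NotImplementedError` that dumps the protobuf attribute listing above. `onnx.TypeProto` is a protobuf oneof, so the reliable way to dispatch on such nested types is `WhichOneof('value')`; a small sketch (the `describe` helper is illustrative, not part of mlprodict):

<<<

from onnx import helper, TensorProto

# Rebuild the type the test uses: optional(sequence(tensor(float, [5]))).
tensor_t = helper.make_tensor_type_proto(TensorProto.FLOAT, [5])
opt_t = helper.make_optional_type_proto(
    helper.make_sequence_type_proto(tensor_t))

def describe(type_proto):
    # TypeProto is a oneof: ask which field is set instead of guessing
    # from dir(), which is all the error message above can show.
    kind = type_proto.WhichOneof('value')
    if kind == 'tensor_type':
        return 'tensor(elem_type=%d)' % type_proto.tensor_type.elem_type
    if kind == 'sequence_type':
        return 'sequence(%s)' % describe(type_proto.sequence_type.elem_type)
    if kind == 'optional_type':
        return 'optional(%s)' % describe(type_proto.optional_type.elem_type)
    raise NotImplementedError(kind)

print(describe(opt_t))

>>>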
    ======================================================================
    ERROR: test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
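test_identity_sequence gets one step further: the session is created, but mlprodict's `OnnxWholeSession.run` assumes every input wraps a C `OrtValue` and calls `_get_c_value` on it, while a sequence input arrives as a plain Python list of arrays. The underlying onnxruntime API has no such restriction; a sketch with an illustrative one-node model whose input and output are `sequence(tensor(float32))`:

<<<

import numpy as np
from onnx import helper, TensorProto
from onnxruntime import InferenceSession

# Input and output are sequence(tensor(float32)): the same kind of value
# test_identity_sequence feeds, reduced to a single Identity node.
seq_t = helper.make_sequence_type_proto(
    helper.make_tensor_type_proto(TensorProto.FLOAT, None))
graph = helper.make_graph(
    [helper.make_node('Identity', ['S'], ['T'])], 'seq_identity',
    [helper.make_value_info('S', seq_t)],
    [helper.make_value_info('T', seq_t)])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 17)])

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
# A sequence input is a plain Python list of arrays, not an OrtValue:
# exactly the value the dict comprehension above calls _get_c_value on.
feed = {'S': [np.array([1., 2.], dtype=np.float32),
              np.array([3.], dtype=np.float32)]}
print(sess.run(None, feed))

>>>

Note that the traceback shows a fallback already exists: `run_with_ort_values` raises, and the `except` path at session.py line 123 takes over, but that path still assumes every value wraps an `OrtValue`.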
    ======================================================================
    ERROR: test_if_opt_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 629, in to_sequence
        outputs[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n      dim {\n        dim_value: 5\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    [... protobuf attribute listing identical to the one in test_identity_opt above ...]
    
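test_if_opt fails the same way as test_identity_opt: the `If` node returns an optional value, `to_sequence` reaches `_var_as_dict` through the graph outputs this time (onnx_inference.py line 629 rather than 619), and the same `optional(sequence(tensor))` type cannot be unfolded. The `describe` sketch above covers both cases.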
    ======================================================================
    ERROR: test_layer_normalization_2d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[3, 4]
    input: name='W' type=dtype('float32') shape=[3, 4]
    input: name='B' type=dtype('float32') shape=[3, 4]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Zero1D
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XShape, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Rank, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_ReducedShape
    Flatten(X, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XU) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Square, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Var, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Deviation, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Scaled, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Biased, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis0_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[3, 4]
    output: name='Mean' type=dtype('float32') shape=[1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1].
    
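All the `test_layer_normalization_*_expanded_ver18_cpu` failures are opset-18 load errors again, but the graph dumps are worth reading: they are the expanded function body of LayerNormalization, which computes the variance as E[x²] − (E[x])² rather than centring first, then rescales with `W` and shifts with `B`. A numpy transcription of the dump above, as a sketch (`layer_norm_expanded` is an illustrative name):

<<<

import numpy as np

def layer_norm_expanded(X, W, B, axis=0, epsilon=1e-5):
    shape = X.shape
    lead = int(np.prod(shape[:axis]))              # 1 when axis == 0
    X2D = X.reshape(lead, -1)                      # Flatten(X, axis)
    mean = X2D.mean(axis=1, keepdims=True)         # ReduceMean(XU)
    # Var = MeanOfSquare - SquareOfMean, as in the expanded graph.
    var = (X2D * X2D).mean(axis=1, keepdims=True) - mean * mean
    std = np.sqrt(var + epsilon)                   # Sqrt(VarPlusEpsilon)
    normalized = (X2D - mean) / std                # Div(Deviation, StdDev)
    # Scale and bias are flattened with axis=0, so they broadcast as rows.
    Y2D = normalized * W.reshape(1, -1) + B.reshape(1, -1)
    reduced_shape = shape[:axis] + (1,) * (len(shape) - axis)
    return (Y2D.reshape(shape),                    # Y
            mean.reshape(reduced_shape),           # Mean
            (1.0 / std).reshape(reduced_shape))    # InvStdDev

X = np.random.randn(3, 4).astype(np.float32)
W = np.random.randn(3, 4).astype(np.float32)  # axis=0: scale has X's shape
B = np.random.randn(3, 4).astype(np.float32)
Y, Mean, InvStdDev = layer_norm_expanded(X, W, B, axis=0)
print(Y.shape, Mean.shape, InvStdDev.shape)   # (3, 4) (1, 1) (1, 1)

>>>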
    ======================================================================
    ERROR: test_layer_normalization_2d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[3, 4]
    input: name='W' type=dtype('float32') shape=[4]
    input: name='B' type=dtype('float32') shape=[4]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Zero1D
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XShape, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Rank, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_ReducedShape
    Flatten(X, axis=1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XU) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Square, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Var, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Deviation, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Scaled, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Biased, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis1_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[3, 4]
    output: name='Mean' type=dtype('float32') shape=[3, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[3, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[3, 4]
    input: name='W' type=dtype('float32') shape=[4]
    input: name='B' type=dtype('float32') shape=[4]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Zero1D
    Constant(value=[-1]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XShape, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_ReducedShape
    Flatten(X, axis=-1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XU) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Square, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Var, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Deviation, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Scaled, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Biased, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis_negative_1_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[3, 4]
    output: name='Mean' type=dtype('float32') shape=[3, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[3, 1].
    
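The negative-axis variants differ from the ones above only in how the function body counts reduced axes: with a negative axis, `NumReducedAxes` is `Neg(Axis1D)` instead of `Sub(Rank, Axis1D)`, and `Slice(XShape, Zero1D, Axis1D)` relies on negative-end slicing. The numpy sketch above covers them once the axis is normalized with `axis % X.ndim`; the load failure itself is still the opset-18 one.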
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[3, 4]
    input: name='W' type=dtype('float32') shape=[3, 4]
    input: name='B' type=dtype('float32') shape=[3, 4]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Zero1D
    Constant(value=[-2]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XShape, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_ReducedShape
    Flatten(X, axis=-2) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XU) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Square, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Var, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Deviation, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Scaled, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Biased, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_2d_axis_negative_2_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[3, 4]
    output: name='Mean' type=dtype('float32') shape=[1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis0_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[2, 3, 5]
    input: name='B' type=dtype('float32') shape=[2, 3, 5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Zero1D
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Rank, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis0_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1, 1].
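
All of the `test_layer_normalization_*_expanded_ver18_cpu` failures above share a single root cause: the test models are stamped with `ai.onnx` opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17, so `InferenceSession` refuses to load them. A minimal sketch of how one might confirm the declared opset before handing a model to the runtime; the file name `model.onnx` is a placeholder, not a path from this test run.

<<<

import onnx

# Placeholder path: load any of the failing test models.
model = onnx.load("model.onnx")

# opset_import maps each domain ('' stands for ai.onnx) to the opset
# version the model was stamped with; onnxruntime 1.13.1 rejects a
# default-domain version of 18 at session creation.
for opset in model.opset_import:
    print(opset.domain or "ai.onnx", opset.version)

>>>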
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[3, 5]
    input: name='B' type=dtype('float32') shape=[3, 5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Zero1D
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Rank, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis1_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[5]
    input: name='B' type=dtype('float32') shape=[5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Zero1D
    Constant(value=[2]) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Rank, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=2) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis2_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[5]
    input: name='B' type=dtype('float32') shape=[5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Zero1D
    Constant(value=[-1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=-1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_1_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[3, 5]
    input: name='B' type=dtype('float32') shape=[3, 5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Zero1D
    Constant(value=[-2]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=-2) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_2_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 1, 1].
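
When the graph only uses operators whose behaviour did not change between releases, one possible workaround is to downgrade the model before creating the session. This is a hedged sketch, not part of the backend tested above: `onnx.version_converter` may fail for operators whose semantics changed between opsets 17 and 18 (ReduceMean, visible in the graph dumps above, moved its axes from an attribute to an input in opset 18).

<<<

import onnx
from onnx import version_converter
from onnxruntime import InferenceSession

# Placeholder path: one of the opset-18 test models.
model = onnx.load("model.onnx")

# Re-stamp the graph at opset 17; this raises if an operator cannot
# be converted back (e.g. when its opset-18 form has no equivalent).
converted = version_converter.convert_version(model, 17)

sess = InferenceSession(converted.SerializeToString(),
                        providers=["CPUExecutionProvider"])

>>>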
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 5]
    input: name='W' type=dtype('float32') shape=[2, 3, 5]
    input: name='B' type=dtype('float32') shape=[2, 3, 5]
    Constant(value=0.10000000...) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Zero1D
    Constant(value=[-3]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XShape, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_ReducedShape
    Flatten(X, axis=-3) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XU) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Square, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Var, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XU, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Deviation, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Scaled, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Biased, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_3d_axis_negative_3_epsilon_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 5]
    output: name='Mean' type=dtype('float32') shape=[1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback elided: same RuntimeError as the first full traceback above, InferenceSession rejects the ai.onnx opset 18 stamp]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='B' type=dtype('float32') shape=[2, 3, 4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Zero1D
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Rank, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_ReducedShape
    Flatten(X, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis0_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[1, 1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1, 1, 1].
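
All of these errors share a single root cause, visible in the traceback kept above: the test models are stamped with ai.onnx opset 18, while this onnxruntime build only guarantees support up to opset 17. A model's declared opsets can be inspected before building a session, and onnx's version converter can sometimes downgrade them. This is only a sketch: "model.onnx" is a placeholder path, and convert_version does not handle every operator and may fail or change semantics::

    import onnx
    from onnx import version_converter

    model = onnx.load("model.onnx")          # placeholder path
    for opset in model.opset_import:
        print(opset.domain or "ai.onnx", opset.version)

    # Best-effort downgrade to the last opset this runtime officially supports.
    converted = version_converter.convert_version(model, 17)
    onnx.save(converted, "model_opset17.onnx")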
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback elided: same RuntimeError as the first full traceback above, InferenceSession rejects the ai.onnx opset 18 stamp]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[3, 4, 5]
    input: name='B' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Zero1D
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Rank, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_ReducedShape
    Flatten(X, axis=1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis1_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 1, 1, 1].
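
The Mean and InvStdDev shapes printed at the end of each dump follow mechanically from the ReducedShape construction: the shape prefix before the axis, concatenated with ones. A small hypothetical helper makes the rule explicit::

    def reduced_shape(x_shape, axis):
        # Shape of Mean / InvStdDev: keep dimensions before `axis`,
        # replace the reduced ones with 1.
        a = axis % len(x_shape)
        return list(x_shape[:a]) + [1] * (len(x_shape) - a)

    # Examples matching the dumps above:
    assert reduced_shape([2, 3, 4, 5], 0) == [1, 1, 1, 1]
    assert reduced_shape([2, 3, 4, 5], 1) == [2, 1, 1, 1]
    assert reduced_shape([2, 3, 5], -2) == [2, 1, 1]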
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback elided: same RuntimeError as the first full traceback above, InferenceSession rejects the ai.onnx opset 18 stamp]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[4, 5]
    input: name='B' type=dtype('float32') shape=[4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Zero1D
    Constant(value=[2]) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Rank, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_ReducedShape
    Flatten(X, axis=2) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis2_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 1, 1].
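
The Cast(..., to=1) nodes wrapping the computation implement LayerNormalization's stash_type attribute, whose default value 1 selects float32 for the intermediate statistics regardless of the input type. The attribute value is just the ONNX tensor element type enum::

    from onnx import TensorProto

    assert TensorProto.FLOAT == 1              # the `to=1` in the Cast nodes above
    print(TensorProto.DataType.Name(1))        # 'FLOAT'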
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback elided: same RuntimeError as the first full traceback above, InferenceSession rejects the ai.onnx opset 18 stamp]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[5]
    input: name='B' type=dtype('float32') shape=[5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Zero1D
    Constant(value=[3]) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_PrefixShape
    Sub(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Rank, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_ReducedShape
    Flatten(X, axis=3) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis3_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 4, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 4, 1].
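
The only structural difference between the positive-axis and negative-axis variants is how the graph derives NumReducedAxes, the number of trailing dimensions folded into the normalization: Sub(Rank, Axis1D) for a non-negative axis (the 4d_axis0 to 4d_axis3 dumps above), Neg(Axis1D) for a negative one (the dump below). A one-line sketch of the equivalence::

    def num_reduced_axes(rank, axis):
        # Sub(Rank, Axis1D) when axis >= 0, Neg(Axis1D) when axis < 0
        return rank - axis if axis >= 0 else -axis

    assert num_reduced_axes(4, 2) == num_reduced_axes(4, -2) == 2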
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback elided: same RuntimeError as the first full traceback above, InferenceSession rejects the ai.onnx opset 18 stamp]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[5]
    input: name='B' type=dtype('float32') shape=[5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Zero1D
    Constant(value=[-1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_ReducedShape
    Flatten(X, axis=-1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_1_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 4, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 4, 1].
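
Editorial note (not part of the captured output): every ``*_ver18_*`` error in this report has the same root cause, stated in the traceback itself — the test models are stamped with ai.onnx opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17. One possible workaround is to downgrade such a model with ``onnx.version_converter`` before creating the session. A minimal sketch, assuming the model only uses operators the converter can map back to opset 17 (``'model.onnx'`` is a hypothetical file name):

<<<

import onnx
from onnx import version_converter
from onnxruntime import InferenceSession

model = onnx.load('model.onnx')  # hypothetical path
# Downgrade the default (ai.onnx) domain from opset 18 to 17; this may fail
# for operators without a registered 18 -> 17 adapter.
model17 = version_converter.convert_version(model, 17)
onnx.checker.check_model(model17)
sess = InferenceSession(model17.SerializeToString(),
                        providers=['CPUExecutionProvider'])

>>>

The captured log continues below.
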
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[4, 5]
    input: name='B' type=dtype('float32') shape=[4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Zero1D
    Constant(value=[-2]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_ReducedShape
    Flatten(X, axis=-2) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_2_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[3, 4, 5]
    input: name='B' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Zero1D
    Constant(value=[-3]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_ReducedShape
    Flatten(X, axis=-3) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_3_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 1, 1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_4_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='B' type=dtype('float32') shape=[2, 3, 4, 5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Zero1D
    Constant(value=[-4]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XShape, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_ReducedShape
    Flatten(X, axis=-4) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XU) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Square, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Var, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XU, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Deviation, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Scaled, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Biased, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_4d_axis_negative_4_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[1, 1, 1, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[1, 1, 1, 1].
    
    ======================================================================
    ERROR: test_layer_normalization_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='W' type=dtype('float32') shape=[5]
    input: name='B' type=dtype('float32') shape=[5]
    Constant(value=9.99999974...) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_FloatEpsilon
      Cast(LayerNormalization_test_layer_normalization_default_axis_expanded_function_FloatEpsilon, to=1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Epsilon
    Shape(X) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_XShape
      Size(LayerNormalization_test_layer_normalization_default_axis_expanded_function_XShape) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Rank
    Constant(value=[0]) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Zero1D
    Constant(value=[-1]) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axis1D
      Slice(LayerNormalization_test_layer_normalization_default_axis_expanded_function_XShape, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Zero1D, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_PrefixShape
    Neg(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axis1D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_NumReducedAxes
      ConstantOfShape(LayerNormalization_test_layer_normalization_default_axis_expanded_function_NumReducedAxes, value=[1]) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_SuffixShape
        Concat(LayerNormalization_test_layer_normalization_default_axis_expanded_function_PrefixShape, LayerNormalization_test_layer_normalization_default_axis_expanded_function_SuffixShape, axis=0) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_ReducedShape
    Flatten(X, axis=-1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_X2D
      Cast(LayerNormalization_test_layer_normalization_default_axis_expanded_function_X2D, to=1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_XU
        Mul(LayerNormalization_test_layer_normalization_default_axis_expanded_function_XU, LayerNormalization_test_layer_normalization_default_axis_expanded_function_XU) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Square
    Constant(value=[1]) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axes_1
      ReduceMean(LayerNormalization_test_layer_normalization_default_axis_expanded_function_XU, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Mean2D
        Mul(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_SquareOfMean
      ReduceMean(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Square, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Axes_1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_MeanOfSquare
        Sub(LayerNormalization_test_layer_normalization_default_axis_expanded_function_MeanOfSquare, LayerNormalization_test_layer_normalization_default_axis_expanded_function_SquareOfMean) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Var
        Add(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Var, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Epsilon) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_VarPlusEpsilon
          Sqrt(LayerNormalization_test_layer_normalization_default_axis_expanded_function_VarPlusEpsilon) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_StdDev
            Reciprocal(LayerNormalization_test_layer_normalization_default_axis_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_InvStdDev2D
          Reshape(LayerNormalization_test_layer_normalization_default_axis_expanded_function_InvStdDev2D, LayerNormalization_test_layer_normalization_default_axis_expanded_function_ReducedShape) -> InvStdDev
        Sub(LayerNormalization_test_layer_normalization_default_axis_expanded_function_XU, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Mean2D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Deviation
          Div(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Deviation, LayerNormalization_test_layer_normalization_default_axis_expanded_function_StdDev) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Normalized
            Cast(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Normalized, to=1) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_NormalizedT
    Flatten(W, axis=0) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Scale2D
      Mul(LayerNormalization_test_layer_normalization_default_axis_expanded_function_NormalizedT, LayerNormalization_test_layer_normalization_default_axis_expanded_function_Scale2D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Scaled
    Flatten(B, axis=0) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_B2D
      Add(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Scaled, LayerNormalization_test_layer_normalization_default_axis_expanded_function_B2D) -> LayerNormalization_test_layer_normalization_default_axis_expanded_function_Biased
      Reshape(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Biased, LayerNormalization_test_layer_normalization_default_axis_expanded_function_XShape) -> Y
    Reshape(LayerNormalization_test_layer_normalization_default_axis_expanded_function_Mean2D, LayerNormalization_test_layer_normalization_default_axis_expanded_function_ReducedShape) -> Mean
    output: name='Y' type=dtype('float32') shape=[2, 3, 4, 5]
    output: name='Mean' type=dtype('float32') shape=[2, 3, 4, 1]
    output: name='InvStdDev' type=dtype('float32') shape=[2, 3, 4, 1].
    
    ======================================================================
    ERROR: test_logsoftmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[0]) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_axis_0_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_axis_0_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_axis_0_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
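
A note on the *_expanded_ver18 failures in this run: the test models are
stamped with opset 18, and onnxruntime 1.13.1 only guarantees support for the
ai.onnx domain up to opset 17, so the session refuses to load them before any
computation happens. A minimal sketch of a possible workaround, assuming the
onnx version converter can map every operator in the graph back to its
opset-17 form (the file name below is hypothetical):

<<<

from onnx import load, version_converter
from onnxruntime import InferenceSession

model = load("logsoftmax_axis_0_expanded.onnx")  # hypothetical file name
# Re-stamp the model with the last opset officially supported by
# onnxruntime 1.13.1 for the default domain.
converted = version_converter.convert_version(model, 17)
sess = InferenceSession(converted.SerializeToString(),
                        providers=["CPUExecutionProvider"])

>>>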
    
    ======================================================================
    ERROR: test_logsoftmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[1]) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_axis_1_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_axis_1_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_axis_1_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_logsoftmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[2]) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_axis_2_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_axis_2_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_axis_2_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_logsoftmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[-1]) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_default_axis_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_default_axis_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_default_axis_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
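
The expanded graphs dumped with these failures all implement the same
numerically stable decomposition of LogSoftmax. In NumPy terms, mirroring the
ReduceMax/Sub/Exp/ReduceSum/Log/Sub chain printed above:

<<<

import numpy as np

def logsoftmax(x, axis=-1):
    # Subtract the per-axis maximum first so Exp cannot overflow, then
    # apply log-sum-exp: y = (x - max) - log(sum(exp(x - max))).
    x_max = np.max(x, axis=axis, keepdims=True)
    x_sub = x - x_max
    return x_sub - np.log(np.sum(np.exp(x_sub), axis=axis, keepdims=True))

>>>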
    
    ======================================================================
    ERROR: test_logsoftmax_example_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[1, 3]
    Constant(value=[-1]) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_example_1_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_example_1_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_example_1_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_example_1_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_example_1_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[1, 3].
    
    ======================================================================
    ERROR: test_logsoftmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[2, 4]
    Constant(value=[-1]) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_large_number_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_large_number_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_large_number_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_large_number_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_large_number_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[2, 4].
    
    ======================================================================
    ERROR: test_logsoftmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[-1]) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_axes
      ReduceMax(x, LogSoftmax_test_logsoftmax_negative_axis_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_ReduceMax
        Sub(x, LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_ReduceMax) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Sub
          Exp(LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Sub) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Exp
      ReduceSum(LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Exp, LogSoftmax_test_logsoftmax_negative_axis_expanded_function_axes, keepdims=1) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_ReduceSum
        Log(LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_ReduceSum) -> LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Log
          Sub(LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Sub, LogSoftmax_test_logsoftmax_negative_axis_expanded_function_X_Log) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
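
Unlike the opset failures above, this one happens inside mlprodict before
onnxruntime is ever reached: the test model declares an optional sequence of
tensors, and _var_as_dict in onnx2py_helper.py does not know how to unfold a
Sequence type proto nested inside an Optional one. A sketch of the type that
trips it up, built with standard onnx.helper calls:

<<<

from onnx import TensorProto, helper

# optional(seq(tensor(float))) -- the value printed in the error above.
tensor_type = helper.make_tensor_type_proto(TensorProto.FLOAT, shape=[])
seq_type = helper.make_sequence_type_proto(tensor_type)
opt_type = helper.make_optional_type_proto(seq_type)
print(opt_type)

>>>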
    
    ======================================================================
    ERROR: test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.RuntimeException: [ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/lstm_base.h:52 onnxruntime::LSTMBase::LSTMBase(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/lstm_base.h:52 onnxruntime::LSTMBase::LSTMBase(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    '
    opset: domain='' version=14
    input: name='X' type=dtype('float32') shape=[3, 1, 2]
    input: name='W' type=dtype('float32') shape=[1, 28, 2]
    input: name='R' type=dtype('float32') shape=[1, 28, 7]
    LSTM(X, W, R, hidden_size=7, layout=1) -> Y, Y_h
    output: name='Y' type=dtype('float32') shape=[3, 1, 1, 7]
    output: name='Y_h' type=dtype('float32') shape=[3, 1, 7].
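
The LSTM failure above is an acknowledged onnxruntime limitation: the CPU
kernel only implements the default time-major layout (layout=0) and rejects
layout=1 at session initialization. A possible workaround, sketched here with
NumPy only, is to transpose the batchwise tensors to time-major, run the node
with layout=0, and transpose the results back:

<<<

import numpy as np

# layout=1 input [batch, seq, input] -> layout=0 input [seq, batch, input].
x_batchwise = np.random.rand(3, 1, 2).astype(np.float32)
x_time_major = np.transpose(x_batchwise, (1, 0, 2))

# With layout=0 the LSTM output Y has shape
# [seq, num_directions, batch, hidden]; moving batch back to the front
# recovers the layout=1 shape [batch, seq, num_directions, hidden].
y_time_major = np.zeros((1, 1, 3, 7), dtype=np.float32)  # placeholder result
y_batchwise = np.transpose(y_time_major, (2, 0, 1, 3))

>>>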
    
    ======================================================================
    ERROR: test_max_int16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('int16') shape=[3]
    input: name='data_1' type=dtype('int16') shape=[3]
    Max(data_0, data_1) -> result
    output: name='result' type=dtype('int16') shape=[3].
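
This and the following Max/Min failures show that onnxruntime 1.13.1 ships no
CPU kernel for 8- and 16-bit integer inputs to these operators, even though
opset 13 allows those types. A sketch of a possible graph-level workaround,
assuming such a rewrite is acceptable for the model at hand: cast up to
int32, where a kernel exists, then cast the result back.

<<<

from onnx import TensorProto, helper

# Hypothetical rewrite of the failing test graph: route the int16 inputs
# through int32, take the maximum there, then cast the result back.
nodes = [
    helper.make_node('Cast', ['data_0'], ['c0'], to=TensorProto.INT32),
    helper.make_node('Cast', ['data_1'], ['c1'], to=TensorProto.INT32),
    helper.make_node('Max', ['c0', 'c1'], ['m32']),
    helper.make_node('Cast', ['m32'], ['result'], to=TensorProto.INT16),
]
graph = helper.make_graph(
    nodes, 'max_int16_via_int32',
    [helper.make_tensor_value_info('data_0', TensorProto.INT16, [3]),
     helper.make_tensor_value_info('data_1', TensorProto.INT16, [3])],
    [helper.make_tensor_value_info('result', TensorProto.INT16, [3])])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 13)])

>>>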
    
    ======================================================================
    ERROR: test_max_int8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('int8') shape=[3]
    input: name='data_1' type=dtype('int8') shape=[3]
    Max(data_0, data_1) -> result
    output: name='result' type=dtype('int8') shape=[3].
    
    ======================================================================
    ERROR: test_max_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('uint16') shape=[3]
    input: name='data_1' type=dtype('uint16') shape=[3]
    Max(data_0, data_1) -> result
    output: name='result' type=dtype('uint16') shape=[3].
    
    ======================================================================
    ERROR: test_max_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Max(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('uint8') shape=[3]
    input: name='data_1' type=dtype('uint8') shape=[3]
    Max(data_0, data_1) -> result
    output: name='result' type=dtype('uint8') shape=[3].
    
    ======================================================================
    ERROR: test_min_int16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('int16') shape=[3]
    input: name='data_1' type=dtype('int16') shape=[3]
    Min(data_0, data_1) -> result
    output: name='result' type=dtype('int16') shape=[3].
    
    ======================================================================
    ERROR: test_min_int8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('int8') shape=[3]
    input: name='data_1' type=dtype('int8') shape=[3]
    Min(data_0, data_1) -> result
    output: name='result' type=dtype('int8') shape=[3].
    
    ======================================================================
    ERROR: test_min_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('uint16') shape=[3]
    input: name='data_1' type=dtype('uint16') shape=[3]
    Min(data_0, data_1) -> result
    output: name='result' type=dtype('uint16') shape=[3].
    
    ======================================================================
    ERROR: test_min_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Min(13) node with name '''
    opset: domain='' version=13
    input: name='data_0' type=dtype('uint8') shape=[3]
    input: name='data_1' type=dtype('uint8') shape=[3]
    Min(data_0, data_1) -> result
    output: name='result' type=dtype('uint8') shape=[3].
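
The Max and Min failures above are all one class of error: onnxruntime 1.13 has no registered Max(13)/Min(13) CPU kernel for these 8- and 16-bit integer types, so session creation fails with NOT_IMPLEMENTED. The same gap shows up again for Mul(14) with uint8 below. A minimal workaround sketch, assuming the goal is only to make such a model loadable: cast the small-integer inputs to int32 (all four types are exactly representable in int32), apply the operator, and cast back. Every name in the example is made up.

<<<

import numpy as np
from onnx import TensorProto, helper
from onnxruntime import InferenceSession

# Workaround sketch (hypothetical names): wrap the unsupported uint8 Min
# in Cast nodes so onnxruntime can use its registered int32 kernel.
X0 = helper.make_tensor_value_info('data_0', TensorProto.UINT8, [3])
X1 = helper.make_tensor_value_info('data_1', TensorProto.UINT8, [3])
Y = helper.make_tensor_value_info('result', TensorProto.UINT8, [3])
nodes = [
    helper.make_node('Cast', ['data_0'], ['c0'], to=TensorProto.INT32),
    helper.make_node('Cast', ['data_1'], ['c1'], to=TensorProto.INT32),
    helper.make_node('Min', ['c0', 'c1'], ['m']),
    helper.make_node('Cast', ['m'], ['result'], to=TensorProto.UINT8),
]
graph = helper.make_graph(nodes, 'min_uint8_workaround', [X0, X1], [Y])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 13)])
sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
print(sess.run(None, {
    'data_0': np.array([1, 200, 3], dtype=np.uint8),
    'data_1': np.array([4, 5, 255], dtype=np.uint8)}))

>>>

For Min and Max the int32 round trip is exact, since these operators only select among their inputs; arithmetic operators such as Mul would additionally need care about overflow semantics before being rewritten this way.
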
    
    ======================================================================
    ERROR: test_mish_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[10000]
    Mish(X) -> Y
    output: name='Y' type=dtype('float32') shape=[10000].
    
    ======================================================================
    ERROR: test_mish_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[10000]
    Softplus(X) -> Mish_test_mish_expanded_function_Softplus_X
      Tanh(Mish_test_mish_expanded_function_Softplus_X) -> Mish_test_mish_expanded_function_TanHSoftplusX
        Mul(X, Mish_test_mish_expanded_function_TanHSoftplusX) -> Y
    output: name='Y' type=dtype('float32') shape=[10000].
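
Both Mish errors, together with test_mvn_expanded_ver18_cpu and the test_optional_get_element_* / test_optional_has_element_* cases below that reach onnxruntime, form a second class: the test models are stamped with ai.onnx opset 18, which onnxruntime 1.13 refuses to load because its official support stops at opset 17. When every node in the graph already exists unchanged at opset 17, as in the expanded Mish graph (Softplus, Tanh, Mul), the model can in principle be down-converted before loading. A hedged sketch, with a hypothetical file path:

<<<

import onnx
from onnx import version_converter

# Downgrade an opset-18 model to opset 17 before handing it to
# onnxruntime 1.13. This only works when each operator's opset-17
# definition is compatible: the fused Mish operator only exists from
# opset 18 on, so this helps the expanded variant, not the fused one.
model = onnx.load('test_mish_expanded/model.onnx')  # hypothetical path
converted = version_converter.convert_version(model, 17)
onnx.save(converted, 'model_opset17.onnx')

>>>
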
    
    ======================================================================
    ERROR: test_momentum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X' type=dtype('float32') shape=[2]
    input: name='G' type=dtype('float32') shape=[2]
    input: name='V' type=dtype('float32') shape=[2]
    Momentum[ai.onnx.preview.training](R, T, X, G, V, alpha=0.95, beta=0.10, mode=b'standard', norm_coefficient=0.00) -> X_new, V_new
    output: name='X_new' type=dtype('float32') shape=[2]
    output: name='V_new' type=dtype('float32') shape=[2].
    
    ======================================================================
    ERROR: test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X1' type=dtype('float32') shape=[1]
    input: name='X2' type=dtype('float32') shape=[2]
    input: name='G1' type=dtype('float32') shape=[1]
    input: name='G2' type=dtype('float32') shape=[2]
    input: name='H1' type=dtype('float32') shape=[1]
    input: name='H2' type=dtype('float32') shape=[2]
    Momentum[ai.onnx.preview.training](R, T, X1, X2, G1, G2, H1, H2, alpha=0.95, beta=0.85, mode=b'standard', norm_coefficient=0.00) -> X1_new, X2_new, V1_new, V2_new
    output: name='X1_new' type=dtype('float32') shape=[1]
    output: name='X2_new' type=dtype('float32') shape=[2]
    output: name='V1_new' type=dtype('float32') shape=[1]
    output: name='V2_new' type=dtype('float32') shape=[2].
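
Both Momentum failures, like test_nesterov_momentum_cpu further down, form a third class: the models use the experimental ai.onnx.preview.training domain, and standard onnxruntime wheels register none of its operators, hence the "is not a registered function/op" message. A backend wrapper could detect this before creating the session and skip instead of erroring; a minimal sketch, where uses_training_domain is a hypothetical helper:

<<<

import onnx

def uses_training_domain(model: onnx.ModelProto) -> bool:
    # The training domain shows up both in the opset imports and on
    # the nodes themselves; either is enough to make loading fail.
    domains = {op.domain for op in model.opset_import}
    domains.update(node.domain for node in model.graph.node)
    return 'ai.onnx.preview.training' in domains

>>>

Calling such a helper before prepare() and raising unittest.SkipTest when it returns True would turn these errors into skips.
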
    
    ======================================================================
    ERROR: test_mul_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(14) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(14) node with name '''
    opset: domain='' version=14
    input: name='x' type=dtype('uint8') shape=[3, 4, 5]
    input: name='y' type=dtype('uint8') shape=[3, 4, 5]
    Mul(x, y) -> z
    output: name='z' type=dtype('uint8') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_mvn_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[3, 3, 3, 1]
    Constant(value=2.0) -> MeanVarianceNormalization_test_mvn_expanded_function_Exponent
      Pow(X, MeanVarianceNormalization_test_mvn_expanded_function_Exponent) -> MeanVarianceNormalization_test_mvn_expanded_function_X_squared
    Constant(value=9.99999971...) -> MeanVarianceNormalization_test_mvn_expanded_function_Epsilon
    Constant(value_ints=[0,2,3]) -> MeanVarianceNormalization_test_mvn_expanded_function_axes
      ReduceMean(X, MeanVarianceNormalization_test_mvn_expanded_function_axes) -> MeanVarianceNormalization_test_mvn_expanded_function_X_RM
      Pow(MeanVarianceNormalization_test_mvn_expanded_function_X_RM, MeanVarianceNormalization_test_mvn_expanded_function_Exponent) -> MeanVarianceNormalization_test_mvn_expanded_function_EX_squared
    ReduceMean(MeanVarianceNormalization_test_mvn_expanded_function_X_squared, MeanVarianceNormalization_test_mvn_expanded_function_axes) -> MeanVarianceNormalization_test_mvn_expanded_function_E_Xsquared
      Sub(MeanVarianceNormalization_test_mvn_expanded_function_E_Xsquared, MeanVarianceNormalization_test_mvn_expanded_function_EX_squared) -> MeanVarianceNormalization_test_mvn_expanded_function_Variance
        Sqrt(MeanVarianceNormalization_test_mvn_expanded_function_Variance) -> MeanVarianceNormalization_test_mvn_expanded_function_STD
      Add(MeanVarianceNormalization_test_mvn_expanded_function_STD, MeanVarianceNormalization_test_mvn_expanded_function_Epsilon) -> MeanVarianceNormalization_test_mvn_expanded_function_Processed_STD
    Sub(X, MeanVarianceNormalization_test_mvn_expanded_function_X_RM) -> MeanVarianceNormalization_test_mvn_expanded_function_X_variance
      Div(MeanVarianceNormalization_test_mvn_expanded_function_X_variance, MeanVarianceNormalization_test_mvn_expanded_function_Processed_STD) -> Y
    output: name='Y' type=dtype('float32') shape=[3, 3, 3, 1].
    
    ======================================================================
    ERROR: test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Momentum(-1) is not a registered function/op'
    opset: domain='ai.onnx.preview.training' version=1
    input: name='R' type=dtype('float32') shape=[]
    input: name='T' type=dtype('int64') shape=[]
    input: name='X' type=dtype('float32') shape=[2]
    input: name='G' type=dtype('float32') shape=[2]
    input: name='V' type=dtype('float32') shape=[2]
    Momentum[ai.onnx.preview.training](R, T, X, G, V, alpha=0.95, beta=1.00, mode=b'nesterov', norm_coefficient=0.01) -> X_new, V_new
    output: name='X_new' type=dtype('float32') shape=[2]
    output: name='V_new' type=dtype('float32') shape=[2].
    
    ======================================================================
    ERROR: test_optional_get_element_optional_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 6\n    shape {\n      dim {\n        dim_value: 4\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    [... remaining protobuf attribute names from dir(var) elided ...]
    elem_type
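
Unlike the classes above, this error never reaches onnxruntime: mlprodict's own _var_as_dict in onnx2py_helper.py raises while translating the graph, because it does not handle a sequence type nested inside an optional type. The missing piece is a small recursion over TypeProto wrappers; the sketch below rebuilds the failing input type, optional(sequence(tensor(int32[4]))), and walks it with a hypothetical describe helper.

<<<

import onnx
from onnx import TensorProto

def describe(t: onnx.TypeProto) -> dict:
    # Unwrap optional/sequence layers down to the tensor element type;
    # a hypothetical stand-in for what _var_as_dict would need here.
    if t.HasField('optional_type'):
        return {'optional': describe(t.optional_type.elem_type)}
    if t.HasField('sequence_type'):
        return {'sequence': describe(t.sequence_type.elem_type)}
    if t.HasField('tensor_type'):
        return {'tensor_elem_type': t.tensor_type.elem_type}
    raise NotImplementedError(f'Unhandled type: {t}')

# Rebuild the input type from the error above: optional(sequence(int32[4])).
opt = onnx.TypeProto()
tt = opt.optional_type.elem_type.sequence_type.elem_type.tensor_type
tt.elem_type = TensorProto.INT32
tt.shape.dim.add().dim_value = 4
print(describe(opt))  # {'optional': {'sequence': {'tensor_elem_type': 6}}}

>>>
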
    
    ======================================================================
    ERROR: test_optional_get_element_optional_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type='?' shape=None
    OptionalGetElement(optional_input) -> output
    output: name='output' type=dtype('float32') shape=[4].
    
    ======================================================================
    ERROR: test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type='?' shape=None
    OptionalGetElement(optional_input) -> output
    output: name='output' type='?' shape=None.
    
    ======================================================================
    ERROR: test_optional_get_element_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type=dtype('float32') shape=[4]
    OptionalGetElement(optional_input) -> output
    output: name='output' type=dtype('float32') shape=[4].
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_name_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    OptionalHasElement() -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_name_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    OptionalHasElement() -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    OptionalHasElement() -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    OptionalHasElement() -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_empty_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type='?' shape=None
    OptionalHasElement(optional_input) -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type='?' shape=None
    OptionalHasElement(optional_input) -> output
    output: name='output' type=dtype('bool') shape=[].
    
    ======================================================================
    ERROR: test_optional_has_element_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='optional_input' type='?' shape=None
    OptionalHasElement(optional_input) -> output
    output: name='output' type=dtype('bool') shape=[].
    
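All the optional-type tests in this run fail the same way. Rather than letting them inflate the error count, they could be filtered out with an exclude pattern before the test cases are registered. A hypothetical setup (the patterns are assumptions matching the test names reported above and below):

<<<

from onnx.backend.test import BackendTest
import mlprodict.onnxrt.backend_ort as backend

back_test = BackendTest(backend, __name__)
back_test.include('.*_cpu')
# Hypothetical filters: skip the optional-type tests (opset 18
# models) and the Pow tests with unsigned integer exponents.
back_test.exclude('.*_optional_.*')
back_test.exclude('.*_pow_types_float32_uint(32|64)_.*')

>>>
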
    ======================================================================
    ERROR: test_pow_types_float32_uint32_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(15) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(15) node with name '''
    opset: domain='' version=15
    input: name='x' type=dtype('float32') shape=[3]
    input: name='y' type=dtype('uint32') shape=[3]
    Pow(x, y) -> z
    output: name='z' type=dtype('float32') shape=[3].
    
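This failure is different from the opset errors: the model loads, but session initialization fails because onnxruntime's CPU execution provider registers no Pow(15) kernel for a float32 base with an unsigned integer exponent, even though the ONNX schema allows that combination. A sketch of a possible equivalent graph, assuming a Cast of the exponent to int64 is acceptable for the data at hand, inserts the cast before the Pow node (the graph is a hypothetical rebuild of the failing test model):

<<<

import numpy
from onnx import TensorProto
from onnx.helper import (
    make_graph, make_model, make_node,
    make_opsetid, make_tensor_value_info)
from onnxruntime import InferenceSession

# Hypothetical rebuild: z = Pow(x, Cast(y)) instead of z = Pow(x, y).
X = make_tensor_value_info('x', TensorProto.FLOAT, [3])
Y = make_tensor_value_info('y', TensorProto.UINT32, [3])
Z = make_tensor_value_info('z', TensorProto.FLOAT, [3])
nodes = [
    make_node('Cast', ['y'], ['y64'], to=TensorProto.INT64),
    make_node('Pow', ['x', 'y64'], ['z']),
]
graph = make_graph(nodes, 'pow_cast', [X, Y], [Z])
model = make_model(graph, opset_imports=[make_opsetid('', 15)])

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
print(sess.run(None, {
    'x': numpy.array([1, 2, 3], dtype=numpy.float32),
    'y': numpy.array([4, 5, 6], dtype=numpy.uint32)}))

>>>
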
    ======================================================================
    ERROR: test_pow_types_float32_uint64_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(15) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(15) node with name '''
    opset: domain='' version=15
    input: name='x' type=dtype('float32') shape=[3]
    input: name='y' type=dtype('uint64') shape=[3]
    Pow(x, y) -> z
    output: name='z' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Abs(data) -> ReduceL1_test_reduce_l1_default_axes_keepdims_example_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_default_axes_keepdims_example_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
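The reduce tests fail for the same opset reason as the optional-type tests: in opset 18, ReduceL1 and the other reduce operators take axes as an optional second input instead of an attribute, so these test models are necessarily stamped opset 18. The expanded variant above also shows the decomposition ReduceL1(data, axes) = ReduceSum(Abs(data), axes). A sketch of the opset-17 form this runtime does accept, assuming the default axes (reduce over all dimensions, keepdims=1) are wanted, moves axes back to an attribute, here simply omitted:

<<<

import numpy
from onnx import TensorProto
from onnx.helper import (
    make_graph, make_model, make_node,
    make_opsetid, make_tensor_value_info)
from onnxruntime import InferenceSession

# Opset-17 form: axes is an (omitted) attribute meaning all axes,
# whereas the failing opset-18 models pass axes as a second input.
data = make_tensor_value_info('data', TensorProto.FLOAT, [3, 2, 2])
red = make_tensor_value_info('reduced', TensorProto.FLOAT, [1, 1, 1])
node = make_node('ReduceL1', ['data'], ['reduced'], keepdims=1)
graph = make_graph([node], 'reduce_l1', [data], [red])
model = make_model(graph, opset_imports=[make_opsetid('', 17)])

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
x = numpy.arange(12).reshape((3, 2, 2)).astype(numpy.float32)
print(sess.run(None, {'data': x}))

>>>
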
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Abs(data) -> ReduceL1_test_reduce_l1_default_axes_keepdims_random_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_default_axes_keepdims_random_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
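
Every ReduceL1 and ReduceL2 failure in this section has the same cause: the backend test models are stamped with ai.onnx opset 18 (the line opset: domain='' version=18 in each summary), while the installed onnxruntime only guarantees support for ai.onnx up to opset 17, so InferenceSession rejects the model before any node runs. The sketch below shows how one could detect this up front; 'model.onnx' is a placeholder path, and the down-conversion is only an illustration, since onnx.version_converter may fail on operators whose signature changed in opset 18 (ReduceL1 now takes axes as an input rather than an attribute).

<<<

# Sketch (not executed as part of this report): inspect the opsets a
# model declares before handing it to onnxruntime, and optionally try
# to convert it down. 'model.onnx' is a placeholder path.
import onnx
from onnx import version_converter

model = onnx.load('model.onnx')
for dom in model.opset_import:
    # domain '' (or 'ai.onnx') is the default ONNX operator set
    print('domain=%r version=%d' % (dom.domain, dom.version))

# This onnxruntime supports ai.onnx up to opset 17, so a model stamped
# with opset 18 could be converted down first. This may raise for the
# reduce operators whose signature changed between 17 and 18.
converted = version_converter.convert_version(model, 17)

>>>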
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_do_not_keepdims_example_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_do_not_keepdims_example_expanded_function_data_abs, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
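
The *_expanded variants run the function body that opset 18 defines for ReduceL1, namely Abs followed by ReduceSum with axes passed as a second input, as the summaries above show. The keepdims attribute only decides whether the reduced axis is kept with size 1. A small numpy illustration of what these cases compute, with the shapes taken from the summaries:

<<<

# numpy illustration of ReduceL1 = ReduceSum(Abs(x)) and of keepdims.
import numpy

data = numpy.arange(12, dtype=numpy.float32).reshape((3, 2, 2)) - 6
axes = (2, )  # the tests above reduce the last axis

# keepdims=0: the reduced axis disappears -> shape (3, 2)
red0 = numpy.sum(numpy.abs(data), axis=axes, keepdims=False)
# keepdims=1: the reduced axis is kept with size 1 -> shape (3, 2, 1)
red1 = numpy.sum(numpy.abs(data), axis=axes, keepdims=True)
print(red0.shape, red1.shape)

>>>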
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_do_not_keepdims_random_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_do_not_keepdims_random_expanded_function_data_abs, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_keep_dims_example_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_keep_dims_example_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_keep_dims_random_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_keep_dims_random_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
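
The negative_axes variants differ from the cases above only in how the axis is encoded: a negative entry counts from the last dimension, so axes=[-1] on a rank-3 input selects the same axis as axes=[2], which is why the output shapes match the keep_dims cases. A quick numpy check (values assumed):

<<<

# negative axes count from the end: -1 addresses the last axis.
import numpy

data = numpy.ones((3, 2, 2), dtype=numpy.float32)
a = numpy.sum(numpy.abs(data), axis=-1, keepdims=True)
b = numpy.sum(numpy.abs(data), axis=2, keepdims=True)
print(a.shape, numpy.array_equal(a, b))  # (3, 2, 1) True

>>>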
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_negative_axes_keep_dims_example_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_negative_axes_keep_dims_example_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL1(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the first ReduceL1 failure above: RuntimeError:
    Unable to create InferenceSession, INVALID_ARGUMENT, ai.onnx opset 18
    is not officially supported, current official support stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Abs(data) -> ReduceL1_test_reduce_l1_negative_axes_keep_dims_random_expanded_function_data_abs
      ReduceSum(ReduceL1_test_reduce_l1_negative_axes_keep_dims_random_expanded_function_data_abs, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
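
Every failure in this log shares the same root cause, spelled out in the
`InvalidArgument` message above: the test models are stamped with `ai.onnx`
opset 18, which was still under development when onnxruntime 1.13.1 was
released, so the runtime refuses to load them before any node is executed.
A minimal sketch of a possible workaround, assuming `onnx.version_converter`
can downgrade the operators involved (the toy model below is a hypothetical
stand-in, not one of the failing test models):

<<<

# Hypothetical sketch: downgrade an opset-18 model to opset 17, the
# latest version onnxruntime 1.13.1 officially supports.
from onnx import TensorProto, version_converter
from onnx.helper import (
    make_graph, make_model, make_node, make_opsetid,
    make_tensor_value_info)

X = make_tensor_value_info('X', TensorProto.FLOAT, [3, 2, 2])
Y = make_tensor_value_info('Y', TensorProto.FLOAT, [3, 2, 2])
graph = make_graph([make_node('Abs', ['X'], ['Y'])], 'g', [X], [Y])
model = make_model(graph, opset_imports=[make_opsetid('', 18)])

# Abs did not change at opset 18, so the conversion only rewrites the
# opset stamp; operators whose schema changed would need an adapter.
model17 = version_converter.convert_version(model, 17)
print(model17.opset_import)

>>>

The backend tests must run the published models as-is, so the log simply
records each rejection; the same error repeats below for every ReduceL2
test.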
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Mul(data, data) -> ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_default_axes_keepdims_example_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
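
The `*_expanded` variants replace the ReduceL2 operator with its function
body, which is what the graph dump above shows: square the input, sum over
the requested axes, cast, take the square root, and cast back to the input
type. A short numpy restatement of that body (a sketch for clarity, not
code from the test suite; `axes=None` stands in for the empty `axes` input
of the default-axes case):

<<<

import numpy

def reduce_l2_expanded(data, axes=None, keepdims=True):
    # Mul(data, data)
    square = data * data
    # ReduceSum(square, axes, keepdims=1)
    summed = numpy.sum(square, axis=axes, keepdims=keepdims)
    # Cast(..., to=1); 1 is TensorProto.FLOAT, a no-op for float32 data
    summed_f = summed.astype(numpy.float32)
    # Sqrt
    root = numpy.sqrt(summed_f)
    # CastLike(..., data): cast back to the input dtype
    return root.astype(data.dtype)

data = numpy.random.rand(3, 2, 2).astype(numpy.float32)
print(reduce_l2_expanded(data).shape)  # (1, 1, 1), as in the output above

>>>

The remaining ReduceL2 variants below fail in exactly the same way, before
the graph can even be executed.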
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Mul(data, data) -> ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_default_axes_keepdims_random_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_data_square, axes, keepdims=0) -> ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_do_not_keepdims_example_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_data_square, axes, keepdims=0) -> ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_do_not_keepdims_random_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_keep_dims_example_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_keep_dims_random_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
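The ``*_expanded`` variants inline the ReduceL2 function body instead of running the single node: square (``Mul``), sum (``ReduceSum``), then ``Cast``, ``Sqrt`` and ``CastLike``, i.e. the square root of the sum of squares. Below is a hedged numpy replay of that graph; note the ``Cast`` with ``to=1`` targets float32, so it is a no-op on this float32 input.

<<<

# Hedged numpy replay of the expanded ReduceL2 graph shown above:
# reduced = CastLike(Sqrt(Cast(ReduceSum(Mul(data, data)))), data).
import numpy

data = numpy.random.rand(3, 2, 2).astype(numpy.float32)
axes = (2,)  # mirrors the 'axes' input of shape [1]

square = data * data                                      # Mul
sum_square = numpy.sum(square, axis=axes, keepdims=True)  # ReduceSum
sum_square = sum_square.astype(numpy.float32)             # Cast(to=1)
reduced = numpy.sqrt(sum_square).astype(data.dtype)       # Sqrt + CastLike
print(reduced.shape)  # (3, 2, 1)

>>>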
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_negative_axes_keep_dims_example_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceL2(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_data_square
      ReduceSum(ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_data_square, axes, keepdims=1) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sum_square
        Cast(ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sum_square, to=1) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sum_square_dbl
          Sqrt(ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sum_square_dbl) -> ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sqrt
            CastLike(ReduceL2_test_reduce_l2_negative_axes_keep_dims_random_expanded_function_sqrt, data) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[2]
    ReduceLogSum(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[5].
    
    ======================================================================
    ERROR: test_reduce_log_sum_asc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[2]
    ReduceSum(data, axes, keepdims=0) -> ReduceLogSum_test_reduce_log_sum_asc_axes_expanded_function_reduced_sum
      Log(ReduceLogSum_test_reduce_log_sum_asc_axes_expanded_function_reduced_sum) -> reduced
    output: name='reduced' type=dtype('float32') shape=[5].
    
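ReduceLogSum expands to just ``ReduceSum`` followed by ``Log``. The ``asc``/``desc`` variants reduce over two explicit axes with ``keepdims=0``; the ``default`` variants further down pass an empty ``axes`` input, which at opset 18 means reducing over all axes, with ``keepdims`` defaulting to 1. A hedged numpy replay of the expanded graph:

<<<

# Hedged numpy replay of the expanded ReduceLogSum graph shown above:
# reduced = Log(ReduceSum(data, axes, keepdims=0)).
import numpy

data = numpy.random.rand(3, 4, 5).astype(numpy.float32)
axes = (0, 1)  # mirrors the 'axes' input of shape [2]

reduced_sum = numpy.sum(data, axis=axes, keepdims=False)  # ReduceSum
reduced = numpy.log(reduced_sum)                          # Log
print(reduced.shape)  # (5,)

>>>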
    ======================================================================
    ERROR: test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceLogSum(data, axes) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_default_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceSum(data, axes, keepdims=1) -> ReduceLogSum_test_reduce_log_sum_default_expanded_function_reduced_sum
      Log(ReduceLogSum_test_reduce_log_sum_default_expanded_function_reduced_sum) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[2]
    ReduceLogSum(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_reduce_log_sum_desc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[2]
    ReduceSum(data, axes, keepdims=0) -> ReduceLogSum_test_reduce_log_sum_desc_axes_expanded_function_reduced_sum
      Log(ReduceLogSum_test_reduce_log_sum_desc_axes_expanded_function_reduced_sum) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[1, 1, 1].
    
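The `*_expanded` variants inline the function body of ReduceLogSumExp (Cast -> Exp -> ReduceSum -> Log -> CastLike) instead of calling the fused operator, so they fail at the same load step for the same reason. Numerically the decomposition is equivalent to the fused operator; a small numpy sketch (the function name is illustrative):

<<<

import numpy

def reduce_log_sum_exp(data, axes=None, keepdims=1):
    # Mirrors the expanded body: Cast(to=double) -> Exp -> ReduceSum
    # -> Log -> CastLike(back to the input type).
    axes = None if axes is None else tuple(int(a) for a in axes)
    res = numpy.log(numpy.sum(numpy.exp(data.astype(numpy.float64)),
                              axis=axes, keepdims=bool(keepdims)))
    return res.astype(data.dtype)

data = numpy.random.rand(3, 2, 2)
# Default axes with keepdims=1 reduce everything: shape (1, 1, 1),
# matching the output summary above.
print(reduce_log_sum_exp(data).shape)

>>>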
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 2].
    
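The `do_not_keepdims` variants set keepdims=0, so the reduced axis is dropped instead of being kept with size 1; that is why the summaries report a [3, 2] output for a [3, 2, 2] input. A quick numpy check (reducing over axis 1, consistent with the reported shapes):

<<<

import numpy

data = numpy.random.rand(3, 2, 2)
# keepdims=False drops the reduced axis: (3, 2, 2) -> (3, 2).
reduced = numpy.log(numpy.sum(numpy.exp(data), axis=1, keepdims=False))
print(reduced.shape)

>>>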
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_data_exp, axes, keepdims=0) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_example_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_data_exp, axes, keepdims=0) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_do_not_keepdims_random_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_example_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_keepdims_random_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    RuntimeError: Unable to create InferenceSession: model stamped with ai.onnx opset 18, official support in onnxruntime 1.13.1 is till opset 17 (same traceback as above).
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
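
Every failure in this part of the report has the same root cause: the node-test models are stamped with ai.onnx opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17, so the session refuses to load the model. A minimal sketch of one possible workaround outside the test harness, assuming a hypothetical `model.onnx` stamped with opset 18; note that `onnx.version_converter` is best effort and may itself fail on operators whose signature changed in opset 18.

<<<

import onnx
from onnx import version_converter
from onnxruntime import InferenceSession

# Hypothetical model stamped with ai.onnx opset 18 (the path is an assumption).
model = onnx.load('model.onnx')

# Downgrade to the last opset this onnxruntime build officially supports.
# The converter may reject operators whose signature changed in opset 18,
# e.g. reduce operators taking 'axes' as an input instead of an attribute.
converted = version_converter.convert_version(model, 17)

sess = InferenceSession(converted.SerializeToString(),
                        providers=['CPUExecutionProvider'])

>>>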
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
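
The `_expanded` variants inline the ReduceLogSumExp function body (Cast to double, Exp, ReduceSum, Log, CastLike back to the input type), but the model is still stamped with opset 18, so it fails at load time exactly like the fused version. A quick numpy check of the decomposition the expanded graph encodes, compared against `scipy.special.logsumexp`:

<<<

import numpy as np
from scipy.special import logsumexp

data = np.random.rand(3, 2, 2).astype(np.float64)

# Exp -> ReduceSum -> Log, with a negative axis as in the test above.
reduced = np.log(np.sum(np.exp(data), axis=-2, keepdims=True))

assert np.allclose(reduced, logsumexp(data, axis=-2, keepdims=True))
print(reduced.shape)  # (3, 1, 2), the output shape reported above

>>>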
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSumExp(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float64') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Cast(data, to=11) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_data_double
      Exp(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_data_double) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_data_exp
        ReduceSum(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_data_exp, axes, keepdims=1) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_reduced_sum
          Log(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_reduced_sum) -> ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_reduced_double
            CastLike(ReduceLogSumExp_test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_function_reduced_double, data) -> reduced
    output: name='reduced' type=dtype('float64') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceLogSum(data, axes) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 5].
    
    ======================================================================
    ERROR: test_reduce_log_sum_negative_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 4, 5]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSum(data, axes, keepdims=1) -> ReduceLogSum_test_reduce_log_sum_negative_axes_expanded_function_reduced_sum
      Log(ReduceLogSum_test_reduce_log_sum_negative_axes_expanded_function_reduced_sum) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 5].
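
Unlike ReduceLogSumExp, ReduceLogSum has no Exp or Cast stages: its function body is just ReduceSum followed by Log, as the expansion above shows. In numpy terms:

<<<

import numpy as np

data = np.random.rand(3, 4, 5).astype(np.float32)

# ReduceSum -> Log over a negative axis, default keepdims=1.
reduced = np.log(np.sum(data, axis=-2, keepdims=True))
print(reduced.shape)  # (3, 1, 5), matching the output above

>>>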
    
    ======================================================================
    ERROR: test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceMax(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
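
When no `axes` input is given, ReduceMax reduces over every axis; with `keepdims=1` the result keeps rank 3, which is why the reported output shape is [1, 1, 1]. The numpy equivalent of the graph above:

<<<

import numpy as np

data = np.random.rand(3, 2, 2).astype(np.float32)

# No axes given: reduce over all axes, keeping the reduced dimensions.
reduced = np.max(data, axis=None, keepdims=True)
print(reduced.shape)  # (1, 1, 1), as in the output above

>>>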
    
    ======================================================================
    ERROR: test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceMax(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
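
This graph passes `axes` as a tensor input rather than the `axes` attribute used up to opset 17; that signature only exists from ai.onnx opset 18 on, which is why these ReduceMax test models are stamped with version 18 in the first place. A sketch rebuilding the graph above by hand with `onnx.helper`; loading it with onnxruntime 1.13.1 reproduces the INVALID_ARGUMENT error in this report:

<<<

from onnx import TensorProto, helper

node = helper.make_node('ReduceMax', ['data', 'axes'], ['reduced'], keepdims=0)
graph = helper.make_graph(
    [node], 'reduce_max_do_not_keepdims',
    [helper.make_tensor_value_info('data', TensorProto.FLOAT, [3, 2, 2]),
     helper.make_tensor_value_info('axes', TensorProto.INT64, [1])],
    [helper.make_tensor_value_info('reduced', TensorProto.FLOAT, [3, 2])])

# 'axes' as an input requires ai.onnx opset 18; simply stamping opset 17
# would not help, since ReduceMax-17 expects 'axes' as an attribute.
model = helper.make_model(graph, opset_imports=[helper.make_opsetid('', 18)])

# Creating InferenceSession(model.SerializeToString(), ...) with
# onnxruntime 1.13.1 fails at load time with the INVALID_ARGUMENT above.

>>>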
    
    ======================================================================
    ERROR: test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted -- identical to the failure above: onnxruntime InvalidArgument (model stamped with opset 18, official support for domain ai.onnx stops at opset 17), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMax(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
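    
    [editor's note: the ReduceMax/ReduceMean/ReduceMin failures below all share
    the same root cause as the error above: the backend test models are stamped
    with ai.onnx opset 18, while onnxruntime 1.13.1 only guarantees support up
    to opset 17. As a minimal sketch, the mismatch can be inspected, and
    possibly worked around, outside the test runner (the file name 'model.onnx'
    is hypothetical):
    
        import onnx
        from onnx import version_converter
        
        model = onnx.load('model.onnx')
        # opsets the model is stamped with, e.g. [('', 18)]
        print([(d.domain, d.version) for d in model.opset_import])
        # try to down-convert the default domain to opset 17,
        # the latest opset this onnxruntime build officially supports
        converted = version_converter.convert_version(model, 17)
        onnx.save(converted, 'model17.onnx')
    ]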
    
    ======================================================================
    ERROR: test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMean(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceMin(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceMin(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [same chained traceback as in test_reduce_max_negative_axes_keepdims_random_cpu above: onnxruntime InvalidArgument ("Current official support for domain ai.onnx is till opset 17"), re-raised as RuntimeError: Unable to create InferenceSession]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
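All the errors above and below share the same root cause, spelled out in the exception text: the test models are stamped with ai.onnx opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17 and refuses to create an InferenceSession for them. The snippet below is a minimal way to reproduce the rejection outside the test runner; it rebuilds the ReduceMin graph from the report above with onnx.helper and is only a sketch, not part of the test suite.

<<<

from onnx import TensorProto
from onnx.helper import (
    make_graph, make_model, make_node, make_opsetid,
    make_tensor_value_info)
from onnxruntime import InferenceSession

# Same graph as the failing test: ReduceMin with 'axes' passed as an
# input, a form that only exists from opset 18 on.
data = make_tensor_value_info('data', TensorProto.FLOAT, [3, 2, 2])
axes = make_tensor_value_info('axes', TensorProto.INT64, [1])
reduced = make_tensor_value_info('reduced', TensorProto.FLOAT, None)
node = make_node('ReduceMin', ['data', 'axes'], ['reduced'], keepdims=0)
graph = make_graph([node], 'reduce_min_18', [data, axes], [reduced])
model = make_model(graph, opset_imports=[make_opsetid('', 18)])

try:
    InferenceSession(model.SerializeToString(),
                     providers=['CPUExecutionProvider'])
except Exception as e:
    # onnxruntime 1.13.1 rejects the model at session creation.
    print(type(e).__name__)

>>>
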
    ======================================================================
    ERROR: test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceMin(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceProd(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    ReduceProd(data, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
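The two test_reduce_prod_default_axes_* failures above show that the check is about the opset stamp alone: those models have no 'axes' input at all, yet they are rejected all the same. The same reduction stamped with opset 17, where 'axes' is an attribute of the reduce operators rather than an input, loads and runs. A sketch under that assumption (shapes copied from the report, the rest illustrative):

<<<

import numpy
from onnx import TensorProto
from onnx.helper import (
    make_graph, make_model, make_node, make_opsetid,
    make_tensor_value_info)
from onnxruntime import InferenceSession

# Opset-17 form of the same reduction: 'axes' is an attribute, not an
# input, and opset 17 is fully supported by onnxruntime 1.13.1.
data = make_tensor_value_info('data', TensorProto.FLOAT, [3, 2, 2])
reduced = make_tensor_value_info('reduced', TensorProto.FLOAT, None)
node = make_node('ReduceProd', ['data'], ['reduced'], axes=[1], keepdims=0)
graph = make_graph([node], 'reduce_prod_17', [data], [reduced])
model = make_model(graph, opset_imports=[make_opsetid('', 17)])

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
x = numpy.arange(12).reshape((3, 2, 2)).astype(numpy.float32)
print(sess.run(None, {'data': x})[0].shape)  # (3, 2) since keepdims=0

>>>
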
    ======================================================================
    ERROR: test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    (traceback identical to test_reduce_min_do_not_keepdims_random_cpu above; failing model:)
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceProd(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
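Until a released onnxruntime supports opset 18, the usual workarounds are to regenerate the models with an older target opset, to exclude these tests, or to downgrade an existing file with onnx.version_converter. The last option is a possibility rather than a guarantee: it requires the installed converter to ship an adapter mapping the opset-18 reduce operators (axes as input) back to their opset-17 form (axes as attribute), which may not exist for every operator. A hedged sketch, with a hypothetical file name:

<<<

from onnx import load
from onnx.version_converter import convert_version

# 'reduce_min_opset18.onnx' is a hypothetical file name; the backend
# tests build their models in memory.
model = load('reduce_min_opset18.onnx')
try:
    # Ask for an opset-17 equivalent; raises if no adapter exists.
    converted = convert_version(model, 17)
except Exception as e:
    print('cannot downgrade to opset 17:', e)

>>>
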
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_default_axes_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_default_axes_keepdims_example_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
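The `_expanded` tests inline the body of the ReduceSumSquare function, which the dump above makes explicit: `Mul(data, data)` followed by `ReduceSum`. A numpy sketch of that decomposition for the default-axes case, assuming (as the [1, 1, 1] output shape indicates) that an empty `axes` input means reducing over every axis:

<<<

import numpy as np

data = np.arange(12, dtype=np.float32).reshape(3, 2, 2)

# Expanded ReduceSumSquare, default axes, keepdims=1:
# Mul(data, data) -> square, then ReduceSum over all axes.
square = data * data
reduced = square.sum(axis=None, keepdims=True)
print(reduced.shape)  # (1, 1, 1)

>>>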
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[None]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_default_axes_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_default_axes_keepdims_random_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[1, 1, 1].
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
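The models themselves are valid opset-18 graphs, so the failing cases can still be checked against the reference evaluator added in onnx 1.13, which implements the opset-18 reduce operators in pure Python. A minimal sketch rebuilding the `do_not_keepdims` case above (shapes taken from the printed summary; the axes value [1] is an assumption consistent with the [3, 2] output):

<<<

import numpy as np
from onnx import TensorProto, helper
from onnx.reference import ReferenceEvaluator

# ReduceSumSquare as an opset-18 node: 'axes' is an input, not an attribute.
node = helper.make_node("ReduceSumSquare", ["data", "axes"], ["reduced"],
                        keepdims=0)
graph = helper.make_graph(
    [node], "reduce_sum_square_do_not_keepdims",
    [helper.make_tensor_value_info("data", TensorProto.FLOAT, [3, 2, 2]),
     helper.make_tensor_value_info("axes", TensorProto.INT64, [1])],
    [helper.make_tensor_value_info("reduced", TensorProto.FLOAT, [3, 2])])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 18)])

# onnxruntime rejects this model; the pure python evaluator does not.
ref = ReferenceEvaluator(model)
data = np.arange(12, dtype=np.float32).reshape(3, 2, 2)
axes = np.array([1], dtype=np.int64)
print(ref.run(None, {"data": data, "axes": axes}))

>>>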
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_do_not_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_do_not_keepdims_example_expanded_function_data_square, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_do_not_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_do_not_keepdims_random_expanded_function_data_square, axes, keepdims=0) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_keepdims_example_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_keepdims_random_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback omitted: identical to the opset-18 load failure shown in full above]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_negative_axes_keepdims_example_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_negative_axes_keepdims_example_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
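The _expanded variants inline the ReduceSumSquare function into its body, visible in the summary above: a Mul of the input with itself followed by a ReduceSum over the requested axes. A quick numpy check of that equivalence, with shapes taken from the summary:

<<<

import numpy as np

data = np.arange(12, dtype=np.float32).reshape(3, 2, 2)
# ReduceSumSquare(data, axes=[1], keepdims=1) versus its expansion
# Mul(data, data) -> ReduceSum(..., keepdims=1): both yield shape (3, 1, 2).
expanded = np.sum(data * data, axis=1, keepdims=True)
direct = np.sum(np.square(data), axis=1, keepdims=True)
assert np.allclose(expanded, direct)

>>>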
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    ReduceSumSquare(data, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 2, 2]
    input: name='axes' type=dtype('int64') shape=[1]
    Mul(data, data) -> ReduceSumSquare_test_reduce_sum_square_negative_axes_keepdims_random_expanded_function_data_square
      ReduceSum(ReduceSumSquare_test_reduce_sum_square_negative_axes_keepdims_random_expanded_function_data_square, axes, keepdims=1) -> reduced
    output: name='reduced' type=dtype('float32') shape=[3, 1, 2].
    
    ======================================================================
    ERROR: test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='x' type=dtype('int32') shape=[1, 3, 4, 5]
    input: name='pads' type=dtype('int64') shape=[8]
    Pad(x, pads, mode=b'reflect') -> y
    output: name='y' type=dtype('int32') shape=[1, 3, 6, 7].
    
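test_reflect_pad_cpu fails for the same opset reason; the Pad node itself maps directly onto numpy. A small sketch of the reflect semantics, with pads chosen to reproduce the shapes in the summary above ([1, 3, 4, 5] padded to [1, 3, 6, 7]):

<<<

import numpy as np

x = np.arange(60, dtype=np.int32).reshape(1, 3, 4, 5)
# ONNX pads are laid out as [x1_begin, x2_begin, ..., x1_end, x2_end, ...];
# one reflected row/column on each side of the last two axes.
pads = np.array([0, 0, 1, 1, 0, 0, 1, 1], dtype=np.int64)
half = pads.shape[0] // 2
pad_width = list(zip(pads[:half], pads[half:]))
y = np.pad(x, pad_width, mode="reflect")
assert y.shape == (1, 3, 6, 7)

>>>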
    ======================================================================
    ERROR: test_relu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=0.0) -> Relu_test_relu_expanded_function_Zero
      CastLike(Relu_test_relu_expanded_function_Zero, x) -> Relu_test_relu_expanded_function_ZeroCast
        Max(x, Relu_test_relu_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
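test_relu_expanded_ver18_cpu exercises the function body Relu gained at opset 18, shown in the summary above: a Constant zero, a CastLike to align its type with the input, then a Max. The same computation in numpy:

<<<

import numpy as np

x = np.random.randn(3, 4, 5).astype(np.float32)
zero = np.zeros((), dtype=x.dtype)  # CastLike: the constant takes x's dtype
y = np.maximum(x, zero)             # Max(x, zero) is exactly Relu
assert np.allclose(y, np.where(x > 0, x, 0))

>>>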
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, cubic_coeff_a=-0.50, exclude_outside=1, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, coordinate_transformation_mode=b'align_corners', mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, antialias=1, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 2, 2].
    
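The antialias attribute used by this test is new in opset 18, which is why the model cannot simply be stamped with an older opset. A hypothetical reconstruction of such a model with onnx.helper, names and shapes copied from the summary above:

<<<

from onnx import TensorProto, helper

# The empty string stands for the omitted optional 'roi' input of Resize.
node = helper.make_node("Resize", ["X", "", "scales"], ["Y"],
                        antialias=1, mode="cubic")
graph = helper.make_graph(
    [node], "resize_antialias",
    [helper.make_tensor_value_info("X", TensorProto.FLOAT, [1, 1, 4, 4]),
     helper.make_tensor_value_info("scales", TensorProto.FLOAT, [4])],
    [helper.make_tensor_value_info("Y", TensorProto.FLOAT, [1, 1, 2, 2])])
# Stamping ai.onnx opset 18 is what onnxruntime 1.13.1 rejects at load time.
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 18)])

>>>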
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, coordinate_transformation_mode=b'align_corners', mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 1, 2].
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, antialias=1, mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 2, 2].
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical opset-18 traceback omitted]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 1, 2].
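
Every failure that follows has the same root cause: the backend tests generate models stamped with ai.onnx opset 18, and onnxruntime 1.13.1 refuses to load them because its official support for that domain stops at opset 17. A minimal sketch of how one might inspect the opset a model declares and, where the operators permit, convert it down to a released version ('model.onnx' is a placeholder path; Resize only gained the antialias, axes and keep_aspect_ratio_policy attributes in opset 18, so these particular test models cannot be faithfully downgraded):

<<<

import onnx
from onnx import version_converter

# Placeholder path: any model stamped with an unreleased opset.
model = onnx.load('model.onnx')

# Declared opsets, e.g. [('', 18)] means domain ai.onnx, version 18.
print([(d.domain, d.version) for d in model.opset_import])

# Downgrade to the last opset onnxruntime 1.13.1 officially supports.
# This only preserves semantics when every node has an opset-17
# equivalent, which Resize-18 with antialias does not.
converted = version_converter.convert_version(model, 17)
onnx.save(converted, 'model17.onnx')

>>>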
    
    ======================================================================
    ERROR: test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 1, 2].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, antialias=1, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, antialias=1, mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, coordinate_transformation_mode=b'pytorch_half_pixel', mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 1].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 1, 3].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, , , sizes, axes=[2,3], keep_aspect_ratio_policy=b'not_larger', mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 1, 2].
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_not_smaller_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 4]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, , , sizes, axes=[2,3], keep_aspect_ratio_policy=b'not_smaller', mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 2, 3].
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='roi' type=dtype('float32') shape=[4]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, roi, , sizes, axes=[2,3], coordinate_transformation_mode=b'tf_crop_and_resize', mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='roi' type=dtype('float32') shape=[4]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, roi, , sizes, axes=[3,2], coordinate_transformation_mode=b'tf_crop_and_resize', mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='roi' type=dtype('float32') shape=[8]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, roi, , sizes, coordinate_transformation_mode=b'tf_crop_and_resize', extrapolation_value=10.00, mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 3, 3].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [traceback identical to the one above: RuntimeError: Unable to create InferenceSession, ai.onnx opset 18 not supported by onnxruntime 1.13.1]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, cubic_coeff_a=-0.50, exclude_outside=1, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
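
Every Resize failure in this run reduces to the same root cause: the backend tests shipped with onnx 1.13.0 stamp their models with ai.onnx opset 18, while onnxruntime 1.13.1 only guarantees support up to opset 17, so `InferenceSession` refuses to load them. As a minimal sketch (not part of the generated report, and assuming a hypothetical local file `resize_model.onnx`), this is how one might confirm which opsets a failing model declares:

<<<

from onnx import load

# Hypothetical path: any of the failing Resize test models would do.
model = load("resize_model.onnx")
for opset in model.opset_import:
    # An empty domain string stands for the default 'ai.onnx' domain.
    print(opset.domain or "ai.onnx", opset.version)

>>>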
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, coordinate_transformation_mode=b'align_corners', mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, coordinate_transformation_mode=b'asymmetric', mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, coordinate_transformation_mode=b'align_corners', mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 4, 4].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'linear') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 4, 4].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='scales' type=dtype('float32') shape=[2]
    Resize(X, , scales, axes=[2,3], mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 4, 6].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='scales' type=dtype('float32') shape=[2]
    Resize(X, , scales, axes=[3,2], mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 4, 6].
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='scales' type=dtype('float32') shape=[4]
    Resize(X, , scales, mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 4, 6].
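
If such a model has to run on this onnxruntime build anyway, one hedged workaround is to downgrade it with onnx's version converter before creating the session. This is only a sketch under the assumption that the downgrade is expressible: the Resize tests above that pass an `axes` attribute rely on a feature introduced in opset 18, so converting them to opset 17 may fail or change semantics.

<<<

from onnx import load, version_converter

# Hypothetical file name; convert_version is part of the official onnx API.
model = load("resize_model.onnx")
converted = version_converter.convert_version(model, 17)
# 'converted' can then be serialized and handed to InferenceSession.

>>>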
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, mode=b'cubic') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 9, 10].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, , , sizes, axes=[2,3], mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 7, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, , , sizes, axes=[3,2], mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 7, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, coordinate_transformation_mode=b'half_pixel', mode=b'nearest', nearest_mode=b'ceil') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 7, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, coordinate_transformation_mode=b'align_corners', mode=b'nearest', nearest_mode=b'floor') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 2, 2]
    input: name='sizes' type=dtype('int64') shape=[2]
    Resize(X, , , sizes, axes=[2,3], keep_aspect_ratio_policy=b'not_smaller', mode=b'nearest') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
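
This test cannot simply be downgraded: axes and keep_aspect_ratio_policy were only added to Resize in opset 18. Below is a small numpy sketch of how the policy resolves the requested sizes according to the ONNX specification; the helper name is ours, not part of mlprodict:

<<<

import numpy as np


def effective_sizes(in_shape, sizes, axes, policy):
    # Resolves keep_aspect_ratio_policy as described by the ONNX
    # specification: one scale is chosen for every resized axis,
    # the smallest ratio for 'not_larger', the largest for 'not_smaller'.
    ratios = [s / in_shape[a] for s, a in zip(sizes, axes)]
    scale = min(ratios) if policy == "not_larger" else max(ratios)
    out = list(in_shape)
    for a in axes:
        out[a] = int(round(scale * in_shape[a]))
    return out


# For instance, sizes [7, 8] on axes [2, 3] with 'not_smaller'
# keeps the larger ratio (4).
print(effective_sizes([1, 1, 2, 2], [7, 8], [2, 3], "not_smaller"))
# -> [1, 1, 8, 8], the expected output shape above

>>>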
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='X' type=dtype('float32') shape=[1, 1, 4, 4]
    input: name='sizes' type=dtype('int64') shape=[4]
    Resize(X, , , sizes, coordinate_transformation_mode=b'asymmetric', mode=b'nearest', nearest_mode=b'round_prefer_ceil') -> Y
    output: name='Y' type=dtype('float32') shape=[1, 1, 8, 8].
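
The other Resize failures only vary coordinate_transformation_mode and nearest_mode. As a rough 1-D illustration of how those two attributes interact, here is a numpy sketch of the formulas given in the ONNX specification (the helper is ours and ignores the antialias and exclude_outside attributes):

<<<

import numpy as np


def resize_nearest_1d(x, out_len, coord_mode, nearest_mode):
    # 1-D nearest-neighbour Resize following the ONNX formulas:
    # map every output index to an input coordinate, then round it.
    in_len = x.shape[0]
    scale = out_len / in_len
    y = np.empty(out_len, dtype=x.dtype)
    for i in range(out_len):
        if coord_mode == "half_pixel":
            c = (i + 0.5) / scale - 0.5
        elif coord_mode == "asymmetric":
            c = i / scale
        else:  # align_corners
            c = i * (in_len - 1) / (out_len - 1)
        if nearest_mode == "floor":
            k = int(np.floor(c))
        elif nearest_mode == "ceil":
            k = int(np.ceil(c))
        elif nearest_mode == "round_prefer_ceil":
            k = int(np.floor(c + 0.5))
        else:  # round_prefer_floor
            k = int(np.ceil(c - 0.5))
        y[i] = x[min(max(k, 0), in_len - 1)]
    return y


print(resize_nearest_1d(np.array([1., 2.]), 8, "asymmetric", "round_prefer_ceil"))

>>>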
    
    ======================================================================
    ERROR: test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[1, 5]
    input: name='indices' type=dtype('int64') shape=[1, 2]
    input: name='updates' type=dtype('float32') shape=[1, 2]
    ScatterElements(data, indices, updates, axis=1) -> y
    output: name='y' type=dtype('float32') shape=[1, 5].
    
    ======================================================================
    ERROR: test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[1, 5]
    input: name='indices' type=dtype('int64') shape=[1, 2]
    input: name='updates' type=dtype('float32') shape=[1, 2]
    ScatterElements(data, indices, updates, axis=1, reduction=b'add') -> y
    output: name='y' type=dtype('float32') shape=[1, 5].
    
    ======================================================================
    ERROR: test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[1, 5]
    input: name='indices' type=dtype('int64') shape=[1, 2]
    input: name='updates' type=dtype('float32') shape=[1, 2]
    ScatterElements(data, indices, updates, axis=1) -> y
    output: name='y' type=dtype('float32') shape=[1, 5].
    
    ======================================================================
    ERROR: test_scatter_elements_with_reduction_max_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[1, 5]
    input: name='indices' type=dtype('int64') shape=[1, 2]
    input: name='updates' type=dtype('float32') shape=[1, 2]
    ScatterElements(data, indices, updates, axis=1, reduction=b'max') -> y
    output: name='y' type=dtype('float32') shape=[1, 5].
    
    ======================================================================
    ERROR: test_scatter_elements_with_reduction_min_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[1, 5]
    input: name='indices' type=dtype('int64') shape=[1, 2]
    input: name='updates' type=dtype('float32') shape=[1, 2]
    ScatterElements(data, indices, updates, axis=1, reduction=b'min') -> y
    output: name='y' type=dtype('float32') shape=[1, 5].
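
The ScatterElements failures tell the same story: reduction values b'max' and b'min' only appeared in opset 18, hence the opset stamp onnxruntime rejects. A numpy sketch of the semantics described by the ONNX specification, restricted to 2-D inputs with axis=1 (the helper name is ours):

<<<

import numpy as np


def scatter_elements_axis1(data, indices, updates, reduction="none"):
    # 2-D ScatterElements with axis=1: indices and updates share a shape,
    # each update lands at out[i, indices[i, j]] under the given reduction.
    out = data.copy()
    for i in range(indices.shape[0]):
        for j in range(indices.shape[1]):
            k = indices[i, j]
            if reduction == "add":
                out[i, k] += updates[i, j]
            elif reduction == "max":
                out[i, k] = max(out[i, k], updates[i, j])
            elif reduction == "min":
                out[i, k] = min(out[i, k], updates[i, j])
            else:
                out[i, k] = updates[i, j]
    return out


data = np.array([[1., 2., 3., 4., 5.]], dtype=np.float32)
indices = np.array([[1, 3]], dtype=np.int64)
updates = np.array([[1.1, 2.1]], dtype=np.float32)
print(scatter_elements_axis1(data, indices, updates, "add"))
# [[1.  3.1 3.  6.1 5. ]]

>>>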
    
    ======================================================================
    ERROR: test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[3, 3]
    input: name='indices' type=dtype('int64') shape=[2, 3]
    input: name='updates' type=dtype('float32') shape=[2, 3]
    ScatterElements(data, indices, updates) -> y
    output: name='y' type=dtype('float32') shape=[3, 3].
    
    ======================================================================
    ERROR: test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[4, 4, 4]
    input: name='indices' type=dtype('int64') shape=[2, 1]
    input: name='updates' type=dtype('float32') shape=[2, 4, 4]
    ScatterND(data, indices, updates, reduction=b'add') -> y
    output: name='y' type=dtype('float32') shape=[4, 4, 4].
    
    ======================================================================
    ERROR: test_scatternd_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[4, 4, 4]
    input: name='indices' type=dtype('int64') shape=[2, 1]
    input: name='updates' type=dtype('float32') shape=[2, 4, 4]
    ScatterND(data, indices, updates) -> y
    output: name='y' type=dtype('float32') shape=[4, 4, 4].
    
    ======================================================================
    ERROR: test_scatternd_max_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    [identical traceback: RuntimeError: Unable to create InferenceSession, opset 18 is not officially supported, current support for domain ai.onnx stops at opset 17]
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[4, 4, 4]
    input: name='indices' type=dtype('int64') shape=[2, 1]
    input: name='updates' type=dtype('float32') shape=[2, 4, 4]
    ScatterND(data, indices, updates, reduction=b'max') -> y
    output: name='y' type=dtype('float32') shape=[4, 4, 4].
    
    ======================================================================
    ERROR: test_scatternd_min_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[4, 4, 4]
    input: name='indices' type=dtype('int64') shape=[2, 1]
    input: name='updates' type=dtype('float32') shape=[2, 4, 4]
    ScatterND(data, indices, updates, reduction=b'min') -> y
    output: name='y' type=dtype('float32') shape=[4, 4, 4].
    
    ======================================================================
    ERROR: test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='data' type=dtype('float32') shape=[4, 4, 4]
    input: name='indices' type=dtype('int64') shape=[2, 1]
    input: name='updates' type=dtype('float32') shape=[2, 4, 4]
    ScatterND(data, indices, updates, reduction=b'mul') -> y
    output: name='y' type=dtype('float32') shape=[4, 4, 4].
    
    ======================================================================
    ERROR: test_selu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=1.6732631921768188) -> Selu_test_selu_default_expanded_function_Alpha
      CastLike(Selu_test_selu_default_expanded_function_Alpha, x) -> Selu_test_selu_default_expanded_function_AlphaCast
    Constant(value_float=1.0507010221481323) -> Selu_test_selu_default_expanded_function_Gamma
      CastLike(Selu_test_selu_default_expanded_function_Gamma, x) -> Selu_test_selu_default_expanded_function_GammaCast
        Mul(Selu_test_selu_default_expanded_function_GammaCast, x) -> Selu_test_selu_default_expanded_function_Pos
    Constant(value=0.0) -> Selu_test_selu_default_expanded_function_Zero
      CastLike(Selu_test_selu_default_expanded_function_Zero, x) -> Selu_test_selu_default_expanded_function_ZeroCast
        Less(x, Selu_test_selu_default_expanded_function_ZeroCast) -> Selu_test_selu_default_expanded_function_XLessThanZero
    Exp(x) -> Selu_test_selu_default_expanded_function_ExpX
      Mul(Selu_test_selu_default_expanded_function_AlphaCast, Selu_test_selu_default_expanded_function_ExpX) -> Selu_test_selu_default_expanded_function_AlphaMulExpX
        Sub(Selu_test_selu_default_expanded_function_AlphaMulExpX, Selu_test_selu_default_expanded_function_AlphaCast) -> Selu_test_selu_default_expanded_function_AlphaMulExpXSubAlpha
        Mul(Selu_test_selu_default_expanded_function_GammaCast, Selu_test_selu_default_expanded_function_AlphaMulExpXSubAlpha) -> Selu_test_selu_default_expanded_function_Neg
          Where(Selu_test_selu_default_expanded_function_XLessThanZero, Selu_test_selu_default_expanded_function_Neg, Selu_test_selu_default_expanded_function_Pos) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_selu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3]
    Constant(value_float=2.0) -> Selu_test_selu_example_expanded_function_Alpha
      CastLike(Selu_test_selu_example_expanded_function_Alpha, x) -> Selu_test_selu_example_expanded_function_AlphaCast
    Constant(value_float=3.0) -> Selu_test_selu_example_expanded_function_Gamma
      CastLike(Selu_test_selu_example_expanded_function_Gamma, x) -> Selu_test_selu_example_expanded_function_GammaCast
        Mul(Selu_test_selu_example_expanded_function_GammaCast, x) -> Selu_test_selu_example_expanded_function_Pos
    Constant(value=0.0) -> Selu_test_selu_example_expanded_function_Zero
      CastLike(Selu_test_selu_example_expanded_function_Zero, x) -> Selu_test_selu_example_expanded_function_ZeroCast
        Less(x, Selu_test_selu_example_expanded_function_ZeroCast) -> Selu_test_selu_example_expanded_function_XLessThanZero
    Exp(x) -> Selu_test_selu_example_expanded_function_ExpX
      Mul(Selu_test_selu_example_expanded_function_AlphaCast, Selu_test_selu_example_expanded_function_ExpX) -> Selu_test_selu_example_expanded_function_AlphaMulExpX
        Sub(Selu_test_selu_example_expanded_function_AlphaMulExpX, Selu_test_selu_example_expanded_function_AlphaCast) -> Selu_test_selu_example_expanded_function_AlphaMulExpXSubAlpha
        Mul(Selu_test_selu_example_expanded_function_GammaCast, Selu_test_selu_example_expanded_function_AlphaMulExpXSubAlpha) -> Selu_test_selu_example_expanded_function_Neg
          Where(Selu_test_selu_example_expanded_function_XLessThanZero, Selu_test_selu_example_expanded_function_Neg, Selu_test_selu_example_expanded_function_Pos) -> y
    output: name='y' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_selu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=2.0) -> Selu_test_selu_expanded_function_Alpha
      CastLike(Selu_test_selu_expanded_function_Alpha, x) -> Selu_test_selu_expanded_function_AlphaCast
    Constant(value_float=3.0) -> Selu_test_selu_expanded_function_Gamma
      CastLike(Selu_test_selu_expanded_function_Gamma, x) -> Selu_test_selu_expanded_function_GammaCast
        Mul(Selu_test_selu_expanded_function_GammaCast, x) -> Selu_test_selu_expanded_function_Pos
    Constant(value=0.0) -> Selu_test_selu_expanded_function_Zero
      CastLike(Selu_test_selu_expanded_function_Zero, x) -> Selu_test_selu_expanded_function_ZeroCast
        Less(x, Selu_test_selu_expanded_function_ZeroCast) -> Selu_test_selu_expanded_function_XLessThanZero
    Exp(x) -> Selu_test_selu_expanded_function_ExpX
      Mul(Selu_test_selu_expanded_function_AlphaCast, Selu_test_selu_expanded_function_ExpX) -> Selu_test_selu_expanded_function_AlphaMulExpX
        Sub(Selu_test_selu_expanded_function_AlphaMulExpX, Selu_test_selu_expanded_function_AlphaCast) -> Selu_test_selu_expanded_function_AlphaMulExpXSubAlpha
        Mul(Selu_test_selu_expanded_function_GammaCast, Selu_test_selu_expanded_function_AlphaMulExpXSubAlpha) -> Selu_test_selu_expanded_function_Neg
          Where(Selu_test_selu_expanded_function_XLessThanZero, Selu_test_selu_expanded_function_Neg, Selu_test_selu_expanded_function_Pos) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
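
All of the errors above share one cause: the backend test models are stamped with opset 18, while this onnxruntime release (1.13.1) only guarantees support for the ai.onnx domain up to opset 17, so `InferenceSession` refuses to load them. A minimal sketch of a possible workaround, assuming the model is available as an `onnx.ModelProto`: downgrade it to the last officially supported opset before creating the session. The conversion is best-effort and can fail for operators whose semantics changed between opsets (the `'max'` and `'min'` values of ScatterND's *reduction* attribute, for instance, only appear at opset 18), so this sketch is illustrative, not how the backend tested above behaves.

<<<

from onnx import ModelProto, version_converter
from onnxruntime import InferenceSession

# Highest opset this onnxruntime build officially supports for the
# default domain, taken from the error message above.
MAX_SUPPORTED_OPSET = 17

def create_session(model: ModelProto) -> InferenceSession:
    # Declared opset of the default (ai.onnx) domain.
    main_opset = max(op.version for op in model.opset_import
                     if op.domain in ('', 'ai.onnx'))
    if main_opset > MAX_SUPPORTED_OPSET:
        # Best-effort downgrade; raises if an operator cannot be
        # expressed in the target opset.
        model = version_converter.convert_version(
            model, MAX_SUPPORTED_OPSET)
    return InferenceSession(model.SerializeToString(),
                            providers=['CPUExecutionProvider'])

>>>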
    
    ======================================================================
    ERROR: test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_add_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_add_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_add_2_sequences_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_add_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_extract_shapes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_extract_shapes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_2_sequences_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
    
    ======================================================================
    ERROR: test_sequence_map_identity_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 119, in run
        return self.sess._sess.run_with_ort_values(
    RuntimeError: Unable to cast Python instance to C++ type (compile in debug mode for details)
    
    During handling of the above exception, another exception occurred:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 896, in run
        return self._run(inputs, clean_right_away=False,  # pylint: disable=E1123
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1401, in _run_whole_runtime
        res = self._whole.run(inputs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in run
        {k: v._get_c_value() for k, v in inputs.items()},
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 123, in <dictcomp>
        {k: v._get_c_value() for k, v in inputs.items()},
    AttributeError: 'list' object has no attribute '_get_c_value'
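
The `test_sequence_map_*` failures above share a single cause: the `onnxruntime1` runtime wraps every tensor input in an object exposing `_get_c_value()`, but an ONNX sequence input reaches the backend as a plain Python `list`, so the dict comprehension in `ops_whole/session.py` breaks. A minimal sketch of a guard, assuming a hypothetical helper `to_c_value` (this is not mlprodict's actual fix):

<<<

def to_c_value(value):
    # Hypothetical helper: ONNX sequences arrive as plain Python lists,
    # so convert them element by element instead of calling _get_c_value.
    if isinstance(value, list):
        return [to_c_value(v) for v in value]
    # Tensor inputs are wrapped objects exposing _get_c_value().
    return value._get_c_value()

# The failing comprehension could then read:
# feeds = {k: to_c_value(v) for k, v in inputs.items()}

>>>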
    
    ======================================================================
    ERROR: test_shrink_hard_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[5]
    Constant(value_float=1.5) -> Shrink_test_shrink_hard_expanded_function_Lambd
      CastLike(Shrink_test_shrink_hard_expanded_function_Lambd, x) -> Shrink_test_shrink_hard_expanded_function_LambdCast
        Neg(Shrink_test_shrink_hard_expanded_function_LambdCast) -> Shrink_test_shrink_hard_expanded_function_NegLmbda
          Less(x, Shrink_test_shrink_hard_expanded_function_NegLmbda) -> Shrink_test_shrink_hard_expanded_function_InputLessThanNegLambda
    Constant(value_float=0.0) -> Shrink_test_shrink_hard_expanded_function_Bias
      CastLike(Shrink_test_shrink_hard_expanded_function_Bias, x) -> Shrink_test_shrink_hard_expanded_function_BiasCast
        Add(x, Shrink_test_shrink_hard_expanded_function_BiasCast) -> Shrink_test_shrink_hard_expanded_function_InputAddBias
    Constant(value=0.0) -> Shrink_test_shrink_hard_expanded_function_Zero
      CastLike(Shrink_test_shrink_hard_expanded_function_Zero, x) -> Shrink_test_shrink_hard_expanded_function_ZeroCast
    Sub(x, Shrink_test_shrink_hard_expanded_function_BiasCast) -> Shrink_test_shrink_hard_expanded_function_InputSubBias
    Less(Shrink_test_shrink_hard_expanded_function_LambdCast, x) -> Shrink_test_shrink_hard_expanded_function_LambdaLessThanInput
      Where(Shrink_test_shrink_hard_expanded_function_LambdaLessThanInput, Shrink_test_shrink_hard_expanded_function_InputSubBias, Shrink_test_shrink_hard_expanded_function_ZeroCast) -> Shrink_test_shrink_hard_expanded_function_InputSubBiasOrZero
        Where(Shrink_test_shrink_hard_expanded_function_InputLessThanNegLambda, Shrink_test_shrink_hard_expanded_function_InputAddBias, Shrink_test_shrink_hard_expanded_function_InputSubBiasOrZero) -> y
    output: name='y' type=dtype('float32') shape=[5].
    
    ======================================================================
    ERROR: test_shrink_soft_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[5]
    Constant(value_float=1.5) -> Shrink_test_shrink_soft_expanded_function_Lambd
      CastLike(Shrink_test_shrink_soft_expanded_function_Lambd, x) -> Shrink_test_shrink_soft_expanded_function_LambdCast
        Neg(Shrink_test_shrink_soft_expanded_function_LambdCast) -> Shrink_test_shrink_soft_expanded_function_NegLmbda
          Less(x, Shrink_test_shrink_soft_expanded_function_NegLmbda) -> Shrink_test_shrink_soft_expanded_function_InputLessThanNegLambda
    Constant(value_float=1.5) -> Shrink_test_shrink_soft_expanded_function_Bias
      CastLike(Shrink_test_shrink_soft_expanded_function_Bias, x) -> Shrink_test_shrink_soft_expanded_function_BiasCast
        Add(x, Shrink_test_shrink_soft_expanded_function_BiasCast) -> Shrink_test_shrink_soft_expanded_function_InputAddBias
    Constant(value=0.0) -> Shrink_test_shrink_soft_expanded_function_Zero
      CastLike(Shrink_test_shrink_soft_expanded_function_Zero, x) -> Shrink_test_shrink_soft_expanded_function_ZeroCast
    Sub(x, Shrink_test_shrink_soft_expanded_function_BiasCast) -> Shrink_test_shrink_soft_expanded_function_InputSubBias
    Less(Shrink_test_shrink_soft_expanded_function_LambdCast, x) -> Shrink_test_shrink_soft_expanded_function_LambdaLessThanInput
      Where(Shrink_test_shrink_soft_expanded_function_LambdaLessThanInput, Shrink_test_shrink_soft_expanded_function_InputSubBias, Shrink_test_shrink_soft_expanded_function_ZeroCast) -> Shrink_test_shrink_soft_expanded_function_InputSubBiasOrZero
        Where(Shrink_test_shrink_soft_expanded_function_InputLessThanNegLambda, Shrink_test_shrink_soft_expanded_function_InputAddBias, Shrink_test_shrink_soft_expanded_function_InputSubBiasOrZero) -> y
    output: name='y' type=dtype('float32') shape=[5].
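
Both `test_shrink_*_expanded_ver18` failures are loading errors, not runtime bugs: the models are stamped with opset 18 for domain `ai.onnx`, while onnxruntime 1.13.1 only guarantees support up to opset 17. The declared opsets of a model can be inspected before building an `InferenceSession`; a short self-contained sketch (the one-node model is built here only for illustration):

<<<

from onnx import TensorProto, helper

# Build a trivial one-node model only to show how declared opsets are read.
node = helper.make_node('Identity', ['x'], ['y'])
graph = helper.make_graph(
    [node], 'tiny',
    [helper.make_tensor_value_info('x', TensorProto.FLOAT, [1])],
    [helper.make_tensor_value_info('y', TensorProto.FLOAT, [1])])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 18)])

for opset in model.opset_import:
    # An empty domain string means the default ai.onnx domain.
    domain = opset.domain or 'ai.onnx'
    print("domain=%r version=%d" % (domain, opset.version))
    # onnxruntime 1.13.1 guarantees ai.onnx support up to opset 17 only,
    # so a model declaring opset 18 is rejected at session creation.

>>>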
    
    ======================================================================
    ERROR: test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.RuntimeException: [ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/rnn.h:45 onnxruntime::RNN<T>::RNN(const onnxruntime::OpKernelInfo&) [with T = float] layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 6 : RUNTIME_EXCEPTION : Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/rnn.h:45 onnxruntime::RNN<T>::RNN(const onnxruntime::OpKernelInfo&) [with T = float] layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    '
    opset: domain='' version=14
    input: name='X' type=dtype('float32') shape=[3, 1, 2]
    input: name='W' type=dtype('float32') shape=[1, 4, 2]
    input: name='R' type=dtype('float32') shape=[1, 4, 4]
    RNN(X, W, R, hidden_size=4, layout=1) -> Y, Y_h
    output: name='Y' type=dtype('float32') shape=[3, 1, 1, 4]
    output: name='Y_h' type=dtype('float32') shape=[3, 1, 4].
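
`test_simple_rnn_batchwise_cpu` fails inside onnxruntime itself: the CPU RNN kernel only implements the default time-major layout and rejects `layout=1` (batch-major). A common workaround, sketched here with numpy under the shapes of the dump above, is to transpose batch-major inputs to time-major, run a `layout=0` RNN, and transpose the outputs back:

<<<

import numpy

# Batch-major input as in the failing test: [batch, seq, input] = [3, 1, 2].
X_batchwise = numpy.random.rand(3, 1, 2).astype(numpy.float32)
# A layout=0 RNN expects time-major input: [seq, batch, input] = [1, 3, 2].
X_timewise = X_batchwise.transpose(1, 0, 2)
print(X_timewise.shape)
# A layout=0 RNN returns Y with shape [seq, directions, batch, hidden];
# transposing it to [batch, seq, directions, hidden] recovers the
# layout=1 output the test expects.

>>>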
    
    ======================================================================
    ERROR: test_softmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[0]) -> Softmax_test_softmax_axis_0_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_axis_0_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_0_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_axis_0_expanded_function_X_ReduceMax) -> Softmax_test_softmax_axis_0_expanded_function_X_Sub
          Exp(Softmax_test_softmax_axis_0_expanded_function_X_Sub) -> Softmax_test_softmax_axis_0_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_axis_0_expanded_function_X_Exp, Softmax_test_softmax_axis_0_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_0_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_axis_0_expanded_function_X_Exp, Softmax_test_softmax_axis_0_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_softmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[1]) -> Softmax_test_softmax_axis_1_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_axis_1_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_1_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_axis_1_expanded_function_X_ReduceMax) -> Softmax_test_softmax_axis_1_expanded_function_X_Sub
          Exp(Softmax_test_softmax_axis_1_expanded_function_X_Sub) -> Softmax_test_softmax_axis_1_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_axis_1_expanded_function_X_Exp, Softmax_test_softmax_axis_1_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_1_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_axis_1_expanded_function_X_Exp, Softmax_test_softmax_axis_1_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_softmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[2]) -> Softmax_test_softmax_axis_2_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_axis_2_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_2_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_axis_2_expanded_function_X_ReduceMax) -> Softmax_test_softmax_axis_2_expanded_function_X_Sub
          Exp(Softmax_test_softmax_axis_2_expanded_function_X_Sub) -> Softmax_test_softmax_axis_2_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_axis_2_expanded_function_X_Exp, Softmax_test_softmax_axis_2_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_axis_2_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_axis_2_expanded_function_X_Exp, Softmax_test_softmax_axis_2_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_softmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[-1]) -> Softmax_test_softmax_default_axis_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_default_axis_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_default_axis_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_default_axis_expanded_function_X_ReduceMax) -> Softmax_test_softmax_default_axis_expanded_function_X_Sub
          Exp(Softmax_test_softmax_default_axis_expanded_function_X_Sub) -> Softmax_test_softmax_default_axis_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_default_axis_expanded_function_X_Exp, Softmax_test_softmax_default_axis_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_default_axis_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_default_axis_expanded_function_X_Exp, Softmax_test_softmax_default_axis_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_softmax_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[1, 3]
    Constant(value=[-1]) -> Softmax_test_softmax_example_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_example_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_example_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_example_expanded_function_X_ReduceMax) -> Softmax_test_softmax_example_expanded_function_X_Sub
          Exp(Softmax_test_softmax_example_expanded_function_X_Sub) -> Softmax_test_softmax_example_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_example_expanded_function_X_Exp, Softmax_test_softmax_example_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_example_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_example_expanded_function_X_Exp, Softmax_test_softmax_example_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[1, 3].
    
    ======================================================================
    ERROR: test_softmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[2, 4]
    Constant(value=[-1]) -> Softmax_test_softmax_large_number_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_large_number_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_large_number_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_large_number_expanded_function_X_ReduceMax) -> Softmax_test_softmax_large_number_expanded_function_X_Sub
          Exp(Softmax_test_softmax_large_number_expanded_function_X_Sub) -> Softmax_test_softmax_large_number_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_large_number_expanded_function_X_Exp, Softmax_test_softmax_large_number_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_large_number_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_large_number_expanded_function_X_Exp, Softmax_test_softmax_large_number_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[2, 4].
    
    ======================================================================
    ERROR: test_softmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=[-1]) -> Softmax_test_softmax_negative_axis_expanded_function_axes
      ReduceMax(x, Softmax_test_softmax_negative_axis_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_negative_axis_expanded_function_X_ReduceMax
        Sub(x, Softmax_test_softmax_negative_axis_expanded_function_X_ReduceMax) -> Softmax_test_softmax_negative_axis_expanded_function_X_Sub
          Exp(Softmax_test_softmax_negative_axis_expanded_function_X_Sub) -> Softmax_test_softmax_negative_axis_expanded_function_X_Exp
      ReduceSum(Softmax_test_softmax_negative_axis_expanded_function_X_Exp, Softmax_test_softmax_negative_axis_expanded_function_axes, keepdims=1) -> Softmax_test_softmax_negative_axis_expanded_function_X_ReduceSum
        Div(Softmax_test_softmax_negative_axis_expanded_function_X_Exp, Softmax_test_softmax_negative_axis_expanded_function_X_ReduceSum) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
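
The seven `test_softmax_*_expanded_ver18` failures are the same opset-18 loading issue already seen with `Shrink`; the expanded graphs themselves are simple. As a sanity check on what the decomposition computes, the dump above maps directly onto numpy (this mirrors the graph, it is not the backend's code):

<<<

import numpy

x = numpy.random.rand(3, 4, 5).astype(numpy.float32)
axis = -1                                    # Constant(value=[-1]) -> axes
x_max = x.max(axis=axis, keepdims=True)      # ReduceMax(keepdims=1)
e = numpy.exp(x - x_max)                     # Sub then Exp
y = e / e.sum(axis=axis, keepdims=True)      # ReduceSum(keepdims=1) then Div
print(y.sum(axis=axis))                      # every slice sums to 1

>>>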
    
    ======================================================================
    ERROR: test_softplus_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Exp(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Exp(1) node with name '''
    opset: domain='' version=1
    input: name='x' type=dtype('float32') shape=[3]
    Constant(value=1.0) -> Softplus_test_softplus_example_expanded_function_one
    Exp(x) -> Softplus_test_softplus_example_expanded_function_exp_x
      Add(Softplus_test_softplus_example_expanded_function_exp_x, Softplus_test_softplus_example_expanded_function_one) -> Softplus_test_softplus_example_expanded_function_exp_x_add_one
        Log(Softplus_test_softplus_example_expanded_function_exp_x_add_one) -> y
    output: name='y' type=dtype('float32') shape=[3].
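
The two `softplus_*_expanded` failures are different: the expanded function body is stamped with opset 1 for the default domain, and onnxruntime registers no `Exp` kernel for that opset version, so session creation fails with `NOT_IMPLEMENTED` instead of the load-time `INVALID_ARGUMENT` above. A sketch that should reproduce the same error with a hand-built one-node model (an illustration, not part of the test suite):

<<<

# Sketch reproducing the NOT_IMPLEMENTED error: an Exp node in a graph
# stamped with opset 1 for the default domain finds no matching kernel.
from onnx import TensorProto, helper
from onnxruntime import InferenceSession

node = helper.make_node('Exp', ['x'], ['y'])
graph = helper.make_graph(
    [node], 'g',
    [helper.make_tensor_value_info('x', TensorProto.FLOAT, [3])],
    [helper.make_tensor_value_info('y', TensorProto.FLOAT, [3])])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 1)])

try:
    InferenceSession(model.SerializeToString(),
                     providers=['CPUExecutionProvider'])
except Exception as e:
    print(type(e).__name__, e)

>>>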
    
    ======================================================================
    ERROR: test_softplus_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Exp(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Exp(1) node with name '''
    opset: domain='' version=1
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value=1.0) -> Softplus_test_softplus_expanded_function_one
    Exp(x) -> Softplus_test_softplus_expanded_function_exp_x
      Add(Softplus_test_softplus_expanded_function_exp_x, Softplus_test_softplus_expanded_function_one) -> Softplus_test_softplus_expanded_function_exp_x_add_one
        Log(Softplus_test_softplus_expanded_function_exp_x_add_one) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_softsign_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3]
    Abs(x) -> Softsign_test_softsign_example_expanded_function_AbsInput
    Constant(value=1.0) -> Softsign_test_softsign_example_expanded_function_One
      CastLike(Softsign_test_softsign_example_expanded_function_One, x) -> Softsign_test_softsign_example_expanded_function_OneCast
      Add(Softsign_test_softsign_example_expanded_function_OneCast, Softsign_test_softsign_example_expanded_function_AbsInput) -> Softsign_test_softsign_example_expanded_function_OneAddAbsInput
        Div(x, Softsign_test_softsign_example_expanded_function_OneAddAbsInput) -> y
    output: name='y' type=dtype('float32') shape=[3].
    
    ======================================================================
    ERROR: test_softsign_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Abs(x) -> Softsign_test_softsign_expanded_function_AbsInput
    Constant(value=1.0) -> Softsign_test_softsign_expanded_function_One
      CastLike(Softsign_test_softsign_expanded_function_One, x) -> Softsign_test_softsign_expanded_function_OneCast
      Add(Softsign_test_softsign_expanded_function_OneCast, Softsign_test_softsign_expanded_function_AbsInput) -> Softsign_test_softsign_expanded_function_OneAddAbsInput
        Div(x, Softsign_test_softsign_expanded_function_OneAddAbsInput) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_split_1d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[7]
    Split(input, num_outputs=4) -> output_1, output_2, output_3, output_4
    output: name='output_1' type=dtype('float32') shape=[2]
    output: name='output_2' type=dtype('float32') shape=[2]
    output: name='output_3' type=dtype('float32') shape=[2]
    output: name='output_4' type=dtype('float32') shape=[1].
    
    ======================================================================
    ERROR: test_split_2d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[2, 8]
    Split(input, axis=1, num_outputs=3) -> output_1, output_2, output_3
    output: name='output_1' type=dtype('float32') shape=[2, 3]
    output: name='output_2' type=dtype('float32') shape=[2, 3]
    output: name='output_3' type=dtype('float32') shape=[2, 2].
    
    ======================================================================
    ERROR: test_split_equal_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[6]
    Split(input, axis=0, num_outputs=3) -> output_1, output_2, output_3
    output: name='output_1' type=dtype('float32') shape=[2]
    output: name='output_2' type=dtype('float32') shape=[2]
    output: name='output_3' type=dtype('float32') shape=[2].
    
    ======================================================================
    ERROR: test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[2, 6]
    Split(input, axis=1, num_outputs=2) -> output_1, output_2
    output: name='output_1' type=dtype('float32') shape=[2, 3]
    output: name='output_2' type=dtype('float32') shape=[2, 3].
    
    ======================================================================
    ERROR: test_split_equal_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[6]
    Split(input, num_outputs=3) -> output_1, output_2, output_3
    output: name='output_1' type=dtype('float32') shape=[2]
    output: name='output_2' type=dtype('float32') shape=[2]
    output: name='output_3' type=dtype('float32') shape=[2].
    
    ======================================================================
    ERROR: test_split_variable_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[6]
    input: name='split' type=dtype('int64') shape=[2]
    Split(input, split, axis=0) -> output_1, output_2
    output: name='output_1' type=dtype('float32') shape=[2]
    output: name='output_2' type=dtype('float32') shape=[4].
    
    ======================================================================
    ERROR: test_split_variable_parts_2d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[2, 6]
    input: name='split' type=dtype('int64') shape=[2]
    Split(input, split, axis=1) -> output_1, output_2
    output: name='output_1' type=dtype('float32') shape=[2, 2]
    output: name='output_2' type=dtype('float32') shape=[2, 4].
    
    ======================================================================
    ERROR: test_split_variable_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[6]
    input: name='split' type=dtype('int64') shape=[2]
    Split(input, split) -> output_1, output_2
    output: name='output_1' type=dtype('float32') shape=[2]
    output: name='output_2' type=dtype('float32') shape=[4].
    
    ======================================================================
    ERROR: test_split_zero_size_splits_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='input' type=dtype('float32') shape=[None]
    input: name='split' type=dtype('int64') shape=[3]
    Split(input, split) -> output_1, output_2, output_3
    output: name='output_1' type=dtype('float32') shape=[None]
    output: name='output_2' type=dtype('float32') shape=[None]
    output: name='output_3' type=dtype('float32') shape=[None].
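
All of the `test_split_*_opset18` failures in this block share the cause already described above: the models are stamped with opset 18 and are rejected at load time, before the `Split` semantics are even exercised (opset 18 introduced the `num_outputs` attribute that several of these tests rely on). Under opset 17 the same uneven split can be expressed with an explicit sizes input, as in this sketch of the first case, `test_split_1d_uneven_split_opset18`:

<<<

# Sketch: opset-17 equivalent of test_split_1d_uneven_split_opset18,
# passing explicit split sizes instead of the opset-18 num_outputs
# attribute.
import numpy
from onnx import TensorProto, helper, numpy_helper
from onnxruntime import InferenceSession

split = numpy_helper.from_array(
    numpy.array([2, 2, 2, 1], dtype=numpy.int64), name='split')
node = helper.make_node(
    'Split', ['input', 'split'],
    ['output_1', 'output_2', 'output_3', 'output_4'], axis=0)
graph = helper.make_graph(
    [node], 'g',
    [helper.make_tensor_value_info('input', TensorProto.FLOAT, [7])],
    [helper.make_tensor_value_info('output_%d' % i, TensorProto.FLOAT, None)
     for i in range(1, 5)],
    initializer=[split])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 17)])

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
print(sess.run(None, {'input': numpy.arange(7, dtype=numpy.float32)}))

>>>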
    
    ======================================================================
    ERROR: test_sub_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Sub(14) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Sub(14) node with name '''
    opset: domain='' version=14
    input: name='x' type=dtype('uint8') shape=[3, 4, 5]
    input: name='y' type=dtype('uint8') shape=[3, 4, 5]
    Sub(x, y) -> z
    output: name='z' type=dtype('uint8') shape=[3, 4, 5].
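
The NOT_IMPLEMENTED failures are a different class: the opset check passes, but kernel resolution fails at session initialization because the CPU execution provider registers Sub(14) only for a limited set of element types, and uint8 is not among them. A minimal sketch reproducing the failure outside the test harness (a workaround would be to Cast to a supported integer type, subtract and Cast back, taking care to preserve uint8 wrap-around semantics):

    from onnx import TensorProto, helper
    from onnxruntime import InferenceSession

    # Minimal reproduction of the failing test: Sub over two uint8 tensors,
    # stamped with opset 14 as in the report above.
    node = helper.make_node('Sub', ['x', 'y'], ['z'])
    graph = helper.make_graph(
        [node], 'sub_uint8',
        [helper.make_tensor_value_info(n, TensorProto.UINT8, [3, 4, 5])
         for n in ('x', 'y')],
        [helper.make_tensor_value_info('z', TensorProto.UINT8, [3, 4, 5])])
    model = helper.make_model(
        graph, opset_imports=[helper.make_opsetid('', 14)])
    try:
        InferenceSession(model.SerializeToString(),
                         providers=['CPUExecutionProvider'])
    except Exception as e:
        # NOT_IMPLEMENTED: no uint8 kernel registered for Sub(14) on CPU.
        print(type(e).__name__, e)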
    
    ======================================================================
    ERROR: test_thresholdedrelu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=1.0) -> ThresholdedRelu_test_thresholdedrelu_default_expanded_function_Alpha
      CastLike(ThresholdedRelu_test_thresholdedrelu_default_expanded_function_Alpha, x) -> ThresholdedRelu_test_thresholdedrelu_default_expanded_function_AlphaCast
        Less(ThresholdedRelu_test_thresholdedrelu_default_expanded_function_AlphaCast, x) -> ThresholdedRelu_test_thresholdedrelu_default_expanded_function_AlphaLessThanX
    Constant(value=0.0) -> ThresholdedRelu_test_thresholdedrelu_default_expanded_function_Zero
      CastLike(ThresholdedRelu_test_thresholdedrelu_default_expanded_function_Zero, x) -> ThresholdedRelu_test_thresholdedrelu_default_expanded_function_ZeroCast
        Where(ThresholdedRelu_test_thresholdedrelu_default_expanded_function_AlphaLessThanX, x, ThresholdedRelu_test_thresholdedrelu_default_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
    
    ======================================================================
    ERROR: test_thresholdedrelu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[5]
    Constant(value_float=2.0) -> ThresholdedRelu_test_thresholdedrelu_example_expanded_function_Alpha
      CastLike(ThresholdedRelu_test_thresholdedrelu_example_expanded_function_Alpha, x) -> ThresholdedRelu_test_thresholdedrelu_example_expanded_function_AlphaCast
        Less(ThresholdedRelu_test_thresholdedrelu_example_expanded_function_AlphaCast, x) -> ThresholdedRelu_test_thresholdedrelu_example_expanded_function_AlphaLessThanX
    Constant(value=0.0) -> ThresholdedRelu_test_thresholdedrelu_example_expanded_function_Zero
      CastLike(ThresholdedRelu_test_thresholdedrelu_example_expanded_function_Zero, x) -> ThresholdedRelu_test_thresholdedrelu_example_expanded_function_ZeroCast
        Where(ThresholdedRelu_test_thresholdedrelu_example_expanded_function_AlphaLessThanX, x, ThresholdedRelu_test_thresholdedrelu_example_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[5].
    
    ======================================================================
    ERROR: test_thresholdedrelu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.InvalidArgument: [ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 2 : INVALID_ARGUMENT : Failed to load model with error: /onnxruntime_src/onnxruntime/core/graph/model_load_utils.h:57 void onnxruntime::model_load_utils::ValidateOpsetForDomain(const std::unordered_map<std::__cxx11::basic_string<char>, int>&, const onnxruntime::logging::Logger&, bool, const string&, int) ONNX Runtime only *guarantees* support for models stamped with official released onnx opset versions. Opset 18 is under development and support for this is limited. The operator schemas and or other functionality may change before next ONNX release and in this case ONNX Runtime will not guarantee backward compatibility. Current official support for domain ai.onnx is till opset 17.
    '
    opset: domain='' version=18
    input: name='x' type=dtype('float32') shape=[3, 4, 5]
    Constant(value_float=2.0) -> ThresholdedRelu_test_thresholdedrelu_expanded_function_Alpha
      CastLike(ThresholdedRelu_test_thresholdedrelu_expanded_function_Alpha, x) -> ThresholdedRelu_test_thresholdedrelu_expanded_function_AlphaCast
        Less(ThresholdedRelu_test_thresholdedrelu_expanded_function_AlphaCast, x) -> ThresholdedRelu_test_thresholdedrelu_expanded_function_AlphaLessThanX
    Constant(value=0.0) -> ThresholdedRelu_test_thresholdedrelu_expanded_function_Zero
      CastLike(ThresholdedRelu_test_thresholdedrelu_expanded_function_Zero, x) -> ThresholdedRelu_test_thresholdedrelu_expanded_function_ZeroCast
        Where(ThresholdedRelu_test_thresholdedrelu_expanded_function_AlphaLessThanX, x, ThresholdedRelu_test_thresholdedrelu_expanded_function_ZeroCast) -> y
    output: name='y' type=dtype('float32') shape=[3, 4, 5].
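
All three test_thresholdedrelu_*_expanded_ver18_cpu failures are the same opset-18 rejection as above; nothing in the expanded body itself is unsupported. The listed graph is just the inlined function body of ThresholdedRelu, which computes y = x where alpha < x, else 0. A short numpy transcription of the printed nodes (the helper name is hypothetical):

    import numpy as np

    def thresholdedrelu_expanded(x, alpha=1.0):
        # Mirrors the expanded function body listed above:
        # Constant -> CastLike -> Less(alpha, x) -> Where(cond, x, 0).
        alpha_cast = np.asarray(alpha, dtype=x.dtype)   # CastLike
        zero_cast = np.asarray(0.0, dtype=x.dtype)      # CastLike
        cond = alpha_cast < x                           # Less
        return np.where(cond, x, zero_cast)             # Where

    x = np.array([-1.5, 0.5, 1.0, 2.5], dtype=np.float32)
    print(thresholdedrelu_expanded(x))  # [0. 0. 0. 2.5] with the default alpha=1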
    
    ======================================================================
    ERROR: test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6]
    Unsqueeze(0, axes=[3]) -> 1
      AveragePool(1, kernel_shape=[2,1], pads=[0,0,0,0], strides=[2,1]) -> 2
        Squeeze(2, axes=[3]) -> 3
    output: name='3' type=dtype('float32') shape=[2, 3, 3].
    
    ======================================================================
    ERROR: test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6]
    Unsqueeze(0, axes=[3]) -> 1
      AveragePool(1, kernel_shape=[2,1], pads=[0,0,0,0], strides=[2,1]) -> 2
        Squeeze(2, axes=[3]) -> 3
    output: name='3' type=dtype('float32') shape=[2, 3, 3].
    
    ======================================================================
    ERROR: test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6, 6]
    AveragePool(0, kernel_shape=[2,2], pads=[0,0,0,0], strides=[2,2]) -> 1
    output: name='1' type=dtype('float32') shape=[2, 3, 3, 3].
    
    ======================================================================
    ERROR: test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6, 6]
    AveragePool(0, kernel_shape=[2,2], pads=[0,0,0,0], strides=[2,2]) -> 1
    output: name='1' type=dtype('float32') shape=[2, 3, 3, 3].
    
    ======================================================================
    ERROR: test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 4, 4]
    AveragePool(0, kernel_shape=[2,2,2], pads=[0,0,0,0,0,0], strides=[2,2,2]) -> 1
    output: name='1' type=dtype('float32') shape=[2, 3, 2, 2, 2].
    
    ======================================================================
    ERROR: test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 4, 4]
    AveragePool(0, kernel_shape=[3,3,3], pads=[0,0,0,0,0,0], strides=[1,1,1]) -> 1
    output: name='1' type=dtype('float32') shape=[2, 3, 2, 2, 2].
    
    ======================================================================
    ERROR: test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for AveragePool(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 5, 5, 5]
    AveragePool(0, kernel_shape=[2,2,2], pads=[0,0,0,0,0,0], strides=[2,2,2]) -> 1
    output: name='1' type=dtype('float32') shape=[2, 3, 2, 2, 2].
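
The seven test_AvgPool*_cpu failures above share a single cause: these PyTorch-converted models declare ai.onnx opset 6, and onnxruntime only supports opset 7 and newer, so kernel resolution cannot find an implementation for the old AveragePool(1) signature. onnx.version_converter can in principle upgrade such models to a supported opset, with mixed results on these old exports. If the goal were a ratio over supportable models only, such legacy tests could also be filtered out exactly like the model-zoo tests, e.g. (hypothetical patterns):

    # Hypothetical extra filters: skip PyTorch-converted tests whose models
    # declare opset 6, below onnxruntime's minimum supported opset (7).
    back_test.exclude('.*_AvgPool[123]d.*_cpu')
    back_test.exclude('.*_BatchNorm[123]d.*_cpu')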
    
    ======================================================================
    ERROR: test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[4, 5, 3]
    input: name='1' type=dtype('float32') shape=[5]
    input: name='2' type=dtype('float32') shape=[5]
    input: name='3' type=dtype('float32') shape=[5]
    input: name='4' type=dtype('float32') shape=[5]
    init: name='1' type=dtype('float32') shape=(5,)
    init: name='2' type=dtype('float32') shape=(5,)
    init: name='3' type=dtype('float32') shape=(5,)
    init: name='4' type=dtype('float32') shape=(5,)
    BatchNormalization(0, 1, 2, 3, 4, epsilon=0.00, is_test=1, momentum=0.90) -> 5
    output: name='5' type=dtype('float32') shape=[4, 5, 3].
    
    ======================================================================
    ERROR: test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6, 6]
    input: name='1' type=dtype('float32') shape=[3]
    input: name='2' type=dtype('float32') shape=[3]
    input: name='3' type=dtype('float32') shape=[3]
    input: name='4' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.736, 0.58 , 0.375], dtype=float32)
    init: name='2' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='3' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='4' type=dtype('float32') shape=(3,) -- array([1., 1., 1.], dtype=float32)
    BatchNormalization(0, 1, 2, 3, 4, epsilon=0.00, is_test=1, momentum=0.90) -> 5
    output: name='5' type=dtype('float32') shape=[2, 3, 6, 6].
    
    ======================================================================
    ERROR: test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 6, 6]
    input: name='1' type=dtype('float32') shape=[3]
    input: name='2' type=dtype('float32') shape=[3]
    input: name='3' type=dtype('float32') shape=[3]
    input: name='4' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.532, 0.746, 0.765], dtype=float32)
    init: name='2' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='3' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='4' type=dtype('float32') shape=(3,) -- array([1., 1., 1.], dtype=float32)
    BatchNormalization(0, 1, 2, 3, 4, epsilon=0.00, is_test=1, momentum=0.20) -> 5
    output: name='5' type=dtype('float32') shape=[2, 3, 6, 6].
    
    ======================================================================
    ERROR: test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 4, 4]
    input: name='1' type=dtype('float32') shape=[3]
    input: name='2' type=dtype('float32') shape=[3]
    input: name='3' type=dtype('float32') shape=[3]
    input: name='4' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.242, 0.961, 0.475], dtype=float32)
    init: name='2' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='3' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='4' type=dtype('float32') shape=(3,) -- array([1., 1., 1.], dtype=float32)
    BatchNormalization(0, 1, 2, 3, 4, epsilon=0.00, is_test=1, momentum=0.90) -> 5
    output: name='5' type=dtype('float32') shape=[2, 3, 4, 4, 4].
    
    ======================================================================
    ERROR: test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for BatchNormalization(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 4, 4]
    input: name='1' type=dtype('float32') shape=[3]
    input: name='2' type=dtype('float32') shape=[3]
    input: name='3' type=dtype('float32') shape=[3]
    input: name='4' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.452, 0.16 , 0.689], dtype=float32)
    init: name='2' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='3' type=dtype('float32') shape=(3,) -- array([0., 0., 0.], dtype=float32)
    init: name='4' type=dtype('float32') shape=(3,) -- array([1., 1., 1.], dtype=float32)
    BatchNormalization(0, 1, 2, 3, 4, epsilon=0.00, is_test=1, momentum=0.30) -> 5
    output: name='5' type=dtype('float32') shape=[2, 3, 4, 4, 4].
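
These BatchNormalization failures all share one root cause: the models are expressed with opset 6, and this build of onnxruntime no longer provides a kernel for BatchNormalization(6), which still carries the long-removed is_test attribute. A possible workaround, sketched below under the assumption that the version converter accepts the model (it may reject some legacy attributes), is to upgrade the opset before creating the session; the path 'model.onnx' is a placeholder, not a file produced by the tests above, and opset 13 is picked arbitrarily as a recent target.

<<<

import onnx
from onnx import version_converter
from onnxruntime import InferenceSession

# Hypothetical opset-6 model; 'model.onnx' is a placeholder path.
model = onnx.load('model.onnx')
# Upgrading to a recent opset lets onnxruntime select an implemented kernel;
# the converter may still fail on some legacy attributes such as is_test.
upgraded = version_converter.convert_version(model, 13)
sess = InferenceSession(upgraded.SerializeToString(),
                        providers=['CPUExecutionProvider'])

>>>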
    
    ======================================================================
    ERROR: test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[5, 6]
    Split(0, axis=-1) -> 1, 2
      Sigmoid(2) -> 3
      Mul(1, 3) -> 4
    output: name='4' type=dtype('float32') shape=[5, 3].
    
    ======================================================================
    ERROR: test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[5, 6, 7]
    Split(0, axis=1) -> 1, 2
      Sigmoid(2) -> 3
      Mul(1, 3) -> 4
    output: name='4' type=dtype('float32') shape=[5, 3, 7].
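
Both GLU tests decompose the operator into Split, Sigmoid and Mul, and it is the Mul(6) kernel that is missing, not GLU itself. As a reference only, a small numpy sketch of what these graphs compute:

<<<

import numpy as np

def sigmoid(v):
    return 1.0 / (1.0 + np.exp(-v))

def glu(x, axis=-1):
    # Split the input in two halves along `axis` and gate the first
    # half with the sigmoid of the second: GLU(x) = a * sigmoid(b).
    a, b = np.split(x, 2, axis=axis)
    return a * sigmoid(b)

x = np.random.rand(5, 6).astype(np.float32)
print(glu(x).shape)  # (5, 3), matching the expected output above

>>>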
    
    ======================================================================
    ERROR: test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[4, 10]
    input: name='1' type=dtype('float32') shape=[8, 10]
    input: name='2' type=dtype('float32') shape=[8]
    init: name='1' type=dtype('float32') shape=(8, 10)
    init: name='2' type=dtype('float32') shape=(8,)
    Gemm(0, 1, 2, alpha=1.00, beta=1.00, broadcast=1, transB=1) -> 3
    output: name='3' type=dtype('float32') shape=[4, 8].
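
test_Linear stops on Gemm(6), which still uses the explicit broadcast attribute dropped in opset 7. The node dumped above is equivalent to the following numpy computation, with random arrays standing in for the dumped initializers:

<<<

import numpy as np

x = np.random.rand(4, 10).astype(np.float32)  # input '0'
w = np.random.rand(8, 10).astype(np.float32)  # initializer '1'
b = np.random.rand(8).astype(np.float32)      # initializer '2'
# Gemm with alpha=1, beta=1, transB=1: y = x @ w.T + b
y = x @ w.T + b
print(y.shape)  # (4, 8)

>>>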
    
    ======================================================================
    ERROR: test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4]
    input: name='1' type=dtype('float32') shape=[1]
    init: name='1' type=dtype('float32') shape=(1,) -- array([0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4].
    
    ======================================================================
    ERROR: test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4]
    input: name='1' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.25, 0.25, 0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4].
    
    ======================================================================
    ERROR: test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='1' type=dtype('float32') shape=[1]
    init: name='1' type=dtype('float32') shape=(1,) -- array([0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4, 5].
    
    ======================================================================
    ERROR: test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 5]
    input: name='1' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.25, 0.25, 0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4, 5].
    
    ======================================================================
    ERROR: test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 5, 6]
    input: name='1' type=dtype('float32') shape=[1]
    init: name='1' type=dtype('float32') shape=(1,) -- array([0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4, 5, 6].
    
    ======================================================================
    ERROR: test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for PRelu(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3, 4, 5, 6]
    input: name='1' type=dtype('float32') shape=[3]
    init: name='1' type=dtype('float32') shape=(3,) -- array([0.25, 0.25, 0.25], dtype=float32)
    PRelu(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[2, 3, 4, 5, 6].
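
The six PReLU variants all hit the same missing PRelu(6) kernel; the slope is either a single value (shape [1]) or one value per channel (shape [3]). A numpy sketch of the operator, assuming the per-channel slope broadcasts over axis 1 as in the graphs above:

<<<

import numpy as np

def prelu(x, slope):
    # PRelu(x) = x where x >= 0, slope * x elsewhere. A per-channel
    # slope is reshaped to broadcast over the channel axis (axis 1).
    if slope.size > 1:
        slope = slope.reshape((1, -1) + (1,) * (x.ndim - 2))
    return np.where(x >= 0, x, slope * x)

x = np.random.randn(2, 3, 4, 5).astype(np.float32)
slope = np.array([0.25, 0.25, 0.25], dtype=np.float32)
print(prelu(x, slope).shape)  # (2, 3, 4, 5)

>>>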
    
    ======================================================================
    ERROR: test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Mul(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[10, 10]
    Constant(value=[[-1.38804...) -> 1
      Mul(1, 0) -> 3
    Exp(0) -> 2
      Sub(2, 3) -> 4
    output: name='4' type=dtype('float32') shape=[10, 10].
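
The PoissonNLLLLoss graph combines Constant, Mul, Exp and Sub into the unreduced Poisson negative log-likelihood exp(x) - target * x, and fails on the Mul(6) kernel. A numpy equivalent, with a random array standing in for the Constant target:

<<<

import numpy as np

x = np.random.randn(10, 10).astype(np.float32)
target = np.random.randn(10, 10).astype(np.float32)  # stand-in for Constant
loss = np.exp(x) - target * x  # Sub(Exp(0), Mul(1, 0))
print(loss.shape)  # (10, 10)

>>>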
    
    ======================================================================
    ERROR: test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[3, 2, 5]
    Abs(0) -> 1
    Constant(value=1.0) -> 2
      Add(1, 2, broadcast=1) -> 3
        Div(0, 3) -> 4
    output: name='4' type=dtype('float32') shape=[3, 2, 5].
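
The Softsign graph above, Abs followed by Add(·, 1) and Div, computes x / (1 + |x|) elementwise and fails on the Add(6) kernel. In numpy:

<<<

import numpy as np

x = np.random.randn(3, 2, 5).astype(np.float32)
y = x / (1.0 + np.abs(x))  # Softsign
print(y.shape)  # (3, 2, 5)

>>>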
    
    ======================================================================
    ERROR: test_operator_add_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float64') shape=[2, 3]
    input: name='1' type=dtype('float64') shape=[3]
    Add(0, 1, broadcast=1, axis=1) -> 2
    output: name='2' type=dtype('float64') shape=[2, 3].
    
    ======================================================================
    ERROR: test_operator_add_size1_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float64') shape=[2, 3]
    input: name='1' type=dtype('float64') shape=[2, 1]
    Add(0, 1, broadcast=1, axis=0) -> 2
    output: name='2' type=dtype('float64') shape=[2, 3].
    
    ======================================================================
    ERROR: test_operator_add_size1_right_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float64') shape=[2, 3]
    input: name='1' type=dtype('float64') shape=[3]
    Add(0, 1, broadcast=1, axis=1) -> 2
    output: name='2' type=dtype('float64') shape=[2, 3].
    
    ======================================================================
    ERROR: test_operator_add_size1_singleton_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float64') shape=[2, 3]
    input: name='1' type=dtype('float64') shape=[1, 3]
    Add(0, 1, broadcast=1, axis=0) -> 2
    output: name='2' type=dtype('float64') shape=[2, 3].
    
    ======================================================================
    ERROR: test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float64') shape=[2, 3]
    Constant(value=1.0) -> 1
      Add(0, 1, broadcast=1) -> 2
    output: name='2' type=dtype('float64') shape=[2, 3].
    
    ======================================================================
    ERROR: test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3]
    input: name='1' type=dtype('float32') shape=[3, 4]
    input: name='2' type=dtype('float32') shape=[4]
    Gemm(0, 1, 2, alpha=1.00, beta=1.00, broadcast=1) -> 3
      Gemm(0, 1, 3, alpha=1.00, beta=1.00) -> 4
    output: name='4' type=dtype('float32') shape=[2, 4].
    
    ======================================================================
    ERROR: test_operator_basic_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[1]
    input: name='1' type=dtype('float32') shape=[1]
    Add(0, 1) -> 2
      Mul(0, 2) -> 3
        Tanh(3) -> 4
          Sigmoid(4) -> 5
            Neg(5) -> 6
    output: name='6' type=dtype('float32') shape=[1].
    
    ======================================================================
    ERROR: test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Gemm(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 3]
    input: name='1' type=dtype('float32') shape=[3, 4]
    Constant(value=[0.0]) -> 2
      Gemm(0, 1, 2, alpha=1.00, beta=0.00, broadcast=1) -> 3
    output: name='3' type=dtype('float32') shape=[2, 4].
    
    ======================================================================
    ERROR: test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('int64') shape=[2, 2]
    input: name='1' type=dtype('int64') shape=[2, 2]
    init: name='1' type=dtype('int64') shape=(2, 2) -- array([1, 2, 3, 4])
    Add(0, 1) -> 2
      Mul(0, 2) -> 3
    output: name='3' type=dtype('int64') shape=[2, 2].
    
    ======================================================================
    ERROR: test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Add(6) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[2, 2]
    input: name='1' type=dtype('float32') shape=[2, 2]
    init: name='1' type=dtype('float32') shape=(2, 2) -- array([1., 2., 3., 4.], dtype=float32)
    Add(0, 1) -> 2
      Mul(0, 2) -> 3
        Tanh(3) -> 4
          Sigmoid(4) -> 5
            Neg(5) -> 6
    output: name='6' type=dtype('float32') shape=[2, 2].
    
    ======================================================================
    ERROR: test_operator_pow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 395, in _create_inference_session
        sess.initialize_session(providers, provider_options, disabled_optimizers)
    onnxruntime.capi.onnxruntime_pybind11_state.NotImplemented: [ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(1) node with name ''
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 9 : NOT_IMPLEMENTED : Could not find an implementation for Pow(1) node with name '''
    opset: domain='' version=6
    input: name='0' type=dtype('float32') shape=[1, 2, 3, 4]
    input: name='1' type=dtype('float32') shape=[1, 2, 3, 4]
    Pow(0, 1) -> 2
    output: name='2' type=dtype('float32') shape=[1, 2, 3, 4].
    
    ======================================================================
    ERROR: test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Gradient(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Gradient(-1) is not a registered function/op'
    opset: domain='' version=12
    opset: domain='ai.onnx.preview.training' version=1
    input: name='a' type=dtype('float32') shape=[]
    input: name='b' type=dtype('float32') shape=[]
    Add(a, b) -> c
      Mul(c, a) -> d
    Gradient[ai.onnx.preview.training](a, b, xs=b'a',b'b', y=b'd') -> dd_da, dd_db
    output: name='d' type=dtype('float32') shape=[]
    output: name='dd_da' type=dtype('float32') shape=[]
    output: name='dd_db' type=dtype('float32') shape=[].
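
    # The two Gradient failures are expected with a standard onnxruntime
    # wheel: operators from the optional ai.onnx.preview.training domain are
    # not registered in inference-only builds, so the session cannot even be
    # created. The cleanest option is to filter these tests out with the same
    # mechanism used for the model zoo tests at the top of this page:
    #
    #     back_test.exclude('.*_gradient_.*')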
    
    ======================================================================
    ERROR: test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 87, in __init__
        self.sess = InferenceSession(onnx_data, sess_options=sess_options,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 347, in __init__
        self._create_inference_session(providers, provider_options, disabled_optimizers)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 386, in _create_inference_session
        sess = C.InferenceSession(session_options, self._model_bytes, False, self._read_config_from_model)
    onnxruntime.capi.onnxruntime_pybind11_state.Fail: [ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Gradient(-1) is not a registered function/op
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 268, in create_inference_session
        return OnnxInference(model, runtime='onnxruntime1')
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 227, in _init
        self._whole = OnnxWholeSession(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_whole/session.py", line 92, in __init__
        raise RuntimeError(
    RuntimeError: Unable to create InferenceSession due to '[ONNXRuntimeError] : 1 : FAIL : Fatal error: ai.onnx.preview.training:Gradient(-1) is not a registered function/op'
    opset: domain='' version=12
    opset: domain='ai.onnx.preview.training' version=1
    input: name='a' type=dtype('float32') shape=[]
    input: name='b' type=dtype('float32') shape=[]
    Add(a, b) -> c
    Gradient[ai.onnx.preview.training](a, b, xs=b'a',b'b', y=b'c') -> dc_da, dc_db
    output: name='c' type=dtype('float32') shape=[]
    output: name='dc_da' type=dtype('float32') shape=[]
    output: name='dc_db' type=dtype('float32') shape=[].
    
    ======================================================================
    FAIL: test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 5 / 10 (50%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 1., 1., 1., 0., 0., 1., 0., 0., 1.])
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.])
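
    # Bernoulli is a non-deterministic operator, so an element-wise comparison
    # against precomputed reference outputs cannot pass: onnxruntime draws
    # from its own generator, and even the *_seed_* variants below fail
    # because a seed attribute does not make two different runtimes produce
    # the same stream. A statistical check would be more meaningful; a rough
    # sketch, where 'p' stands for the test's input probabilities:
    #
    #     import numpy as np
    #     assert set(np.unique(output)) <= {0.0, 1.0}    # 0/1 draws only
    #     assert abs(output.mean() - p.mean()) < 0.25    # mean close to p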
    
    ======================================================================
    FAIL: test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 8 / 10 (80%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 0., 0., 1., 1., 1., 1., 0., 0., 0.])
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.])
    
    ======================================================================
    FAIL: test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 6 / 10 (60%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 0., 0., 0., 0., 0., 1., 1., 0., 0.])
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.])
    
    ======================================================================
    FAIL: test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 7 / 10 (70%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 0., 0., 1., 0., 0., 1., 0., 0., 1.])
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.])
    
    ======================================================================
    FAIL: test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 5 / 10 (50%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 0., 1., 0., 1., 0., 0., 0., 0., 1.], dtype=float32)
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.], dtype=float32)
    
    ======================================================================
    FAIL: test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 5 / 10 (50%)
    Max absolute difference: 1.
    Max relative difference: 1.
     x: array([0., 0., 1., 0., 1., 0., 0., 0., 0., 1.], dtype=float32)
     y: array([0., 1., 1., 0., 0., 1., 0., 1., 1., 1.], dtype=float32)
    
    ======================================================================
    FAIL: test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 208, in assert_similar_outputs
        np.testing.assert_array_equal(outputs[i], ref_outputs[i])
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 934, in assert_array_equal
        assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Arrays are not equal
    
    Mismatched elements: 3 / 12 (25%)
     x: array([['0.9767611', '0.60484552', '0.73926359', '0.039187793'],
           ['0.28280696', '0.12019656', '0.29614019', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
     y: array([['0.9767611', '0.6048455', '0.7392636', '0.039187793'],
           ['0.28280696', '0.12019656', '0.2961402', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
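
    # These arrays differ only in how float32 values were rendered as text:
    # '0.60484552' and '0.6048455' denote the same float32 up to one digit of
    # print precision. A comparison that parses the strings back to numbers
    # would be insensitive to the formatting choice, e.g.:
    #
    #     np.testing.assert_allclose(x.astype(np.float64),
    #                                y.astype(np.float64), rtol=1e-6)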
    
    ======================================================================
    FAIL: test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 208, in assert_similar_outputs
        np.testing.assert_array_equal(outputs[i], ref_outputs[i])
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 934, in assert_array_equal
        assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Arrays are not equal
    
    Mismatched elements: 3 / 12 (25%)
     x: array([['0.9767611', '0.60484552', '0.73926359', '0.039187793'],
           ['0.28280696', '0.12019656', '0.29614019', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
     y: array([['0.9767611', '0.6048455', '0.7392636', '0.039187793'],
           ['0.28280696', '0.12019656', '0.2961402', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
    
    ======================================================================
    FAIL: test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 208, in assert_similar_outputs
        np.testing.assert_array_equal(outputs[i], ref_outputs[i])
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 934, in assert_array_equal
        assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Arrays are not equal
    
    Mismatched elements: 3 / 12 (25%)
     x: array([['0.9767611', '0.60484552', '0.73926359', '0.039187793'],
           ['0.28280696', '0.12019656', '0.29614019', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
     y: array([['0.9767611', '0.6048455', '0.7392636', '0.039187793'],
           ['0.28280696', '0.12019656', '0.2961402', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
    
    ======================================================================
    FAIL: test_dft_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 10 / 200 (5%)
    Max absolute difference: 0.
    Max relative difference: 6.613e+10
     x: array([[[[ 4.500000e+01,  0.000000e+00],
             [-4.999998e+00,  1.538842e+01],
             [-4.999992e+00,  6.881907e+00],...
     y: array([[[[ 4.500000e+01,  0.000000e+00],
             [-5.000000e+00,  1.538842e+01],
             [-5.000000e+00,  6.881909e+00],...
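
    # The tell-tale pair "Max absolute difference: 0." with "Max relative
    # difference: 6.613e+10" means the mismatched entries are coefficients
    # the reference computes as (almost exactly) zero, where rtol=0.001 buys
    # nothing and atol=1e-07 is stricter than float32 FFT arithmetic can
    # honour. A slightly relaxed absolute tolerance would likely absorb all
    # three DFT failures, e.g.:
    #
    #     np.testing.assert_allclose(x, y, rtol=1e-3, atol=1e-4)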
    
    ======================================================================
    FAIL: test_dft_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 10 / 200 (5%)
    Max absolute difference: 0.
    Max relative difference: 4.689e+09
     x: array([[[[ 4.500000e+02,  0.000000e+00],
             [ 4.600000e+02,  0.000000e+00],
             [ 4.700000e+02,  0.000000e+00],...
     y: array([[[[ 4.500000e+02,  0.000000e+00],
             [ 4.600000e+02,  0.000000e+00],
             [ 4.700000e+02,  0.000000e+00],...
    
    ======================================================================
    FAIL: test_dft_inverse_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 10 / 200 (5%)
    Max absolute difference: 4.101e-05
    Max relative difference: 4.689e+09
     x: array([[[[ 4.500000e+01,  0.000000e+00],
             [ 4.600000e+01,  0.000000e+00],
             [ 4.700000e+01,  0.000000e+00],...
     y: array([[[[ 4.500000e+01,  0.000000e+00],
             [ 4.600000e+01,  0.000000e+00],
             [ 4.700000e+01,  0.000000e+00],...
    
    ======================================================================
    FAIL: test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 8 / 25 (32%)
    Max absolute difference: 8.
    Max relative difference: 1.
     x: array([[[[0., 0., 0., 0., 0.],
             [5., 0., 6., 0., 0.],
             [0., 0., 0., 7., 0.],...
     y: array([[[[0., 0., 0., 0., 0.],
             [0., 5., 0., 6., 0.],
             [0., 0., 0., 0., 0.],...
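
    # This one is not a tolerance issue: the unpooled values match but land
    # in shifted positions. How the indices interact with an explicit
    # output_shape in MaxUnpool has been a known point of disagreement
    # between implementations, so the test is best treated as a known
    # divergence and excluded, e.g.:
    #
    #     back_test.exclude('test_maxunpool_export_with_output_shape_cpu')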
    
    ======================================================================
    FAIL: test_stft_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 15 / 270 (5.56%)
    Max absolute difference: 8.393e-05
    Max relative difference: 8.465e-06
     x: array([[[[ 1.200000e+02,  0.000000e+00],
             [-7.999993e+00,  4.021872e+01],
             [-7.999995e+00,  1.931371e+01],...
     y: array([[[[ 1.200000e+02,  0.000000e+00],
             [-8.000000e+00,  4.021872e+01],
             [-8.000000e+00,  1.931371e+01],...
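
    # Same story as the DFT tests above: the largest absolute error is about
    # 8.4e-05 on coefficients that should be exactly zero, so the fixed
    # atol=1e-07 trips while rtol=0.001 holds everywhere else. The relaxed
    # atol=1e-4 sketched above would cover both STFT failures as well.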
    
    ======================================================================
    FAIL: test_stft_with_window_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 15 / 270 (5.56%)
    Max absolute difference: 6.104e-05
    Max relative difference: 3.354e-05
     x: array([[[[ 5.599627e+01,  0.000000e+00],
             [ 2.399911e+01,  2.493398e+01],
             [-7.998686e+00,  2.270421e+01],...
     y: array([[[[ 55.996273,   0.      ],
             [ 23.999105,  24.93398 ],
             [ -7.99869 ,  22.70421 ],...
    
    ======================================================================
    FAIL: test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 23 / 60 (38.3%)
    Max absolute difference: 10.212
    Max relative difference: 1.
     x: array([[[ 0.      ,  0.      ,  3.914952,  0.      ,  0.      ],
            [-0.      ,  0.      , -0.      , -0.      ,  1.642394],
            [ 0.      ,  0.      ,  3.044151,  0.      ,  0.      ],...
     y: array([[[  0.      ,   0.      ,   0.      ,   0.      ,   0.      ],
            [ -0.      ,   0.      ,  -0.605429,  -0.412875,   0.      ],
            [  0.576174,   0.      ,   0.      ,   0.4867  ,   0.      ],...
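
    # Dropout in training mode samples a random mask, so, as with Bernoulli
    # above, onnxruntime cannot reproduce the reference mask element for
    # element and the four training_dropout tests fail on the mask itself.
    # What is deterministic is the scaling contract: with ratio r, every kept
    # entry equals x / (1 - r). A sketch of a check that ignores the mask,
    # where 'x' and 'ratio' stand for the test's input and attribute:
    #
    #     kept = output != 0
    #     np.testing.assert_allclose(output[kept], x[kept] / (1 - ratio))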
    
    ======================================================================
    FAIL: test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 32 / 60 (53.3%)
    Max absolute difference: 5.106
    Max relative difference: 1.
     x: array([[[ 0.      ,  0.      ,  1.957476,  0.      ,  3.735116],
            [-0.      ,  0.      , -0.302714, -0.206438,  0.821197],
            [ 0.      ,  2.908547,  1.522075,  0.      ,  0.      ],...
     y: array([[[ 3.528105,  0.800314,  1.957476,  4.481786,  0.      ],
            [-1.954556,  0.      , -0.302714, -0.206438,  0.      ],
            [ 0.288087,  2.908547,  1.522075,  0.24335 ,  0.      ],...
    
    ======================================================================
    FAIL: test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 32 / 60 (53.3%)
    Max absolute difference: 5.106
    Max relative difference: 1.
     x: array([[[ 0.      ,  0.      ,  1.957476,  0.      ,  3.735116],
            [-0.      ,  0.      , -0.302714, -0.206438,  0.821197],
            [ 0.      ,  2.908547,  1.522075,  0.      ,  0.      ],...
     y: array([[[ 3.528105,  0.800314,  1.957476,  4.481786,  0.      ],
            [-1.954556,  0.      , -0.302714, -0.206438,  0.      ],
            [ 0.288087,  2.908547,  1.522075,  0.24335 ,  0.      ],...
    
    ======================================================================
    FAIL: test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 23 / 60 (38.3%)
    Max absolute difference: 10.212
    Max relative difference: 1.
     x: array([[[ 0.      ,  0.      ,  3.914952,  0.      ,  0.      ],
            [-0.      ,  0.      , -0.      , -0.      ,  1.642394],
            [ 0.      ,  0.      ,  3.044151,  0.      ,  0.      ],...
     y: array([[[  0.      ,   0.      ,   0.      ,   0.      ,   0.      ],
            [ -0.      ,   0.      ,  -0.605429,  -0.412875,   0.      ],
            [  0.576174,   0.      ,   0.      ,   0.4867  ,   0.      ],...
    
    ----------------------------------------------------------------------
    Ran 2492 tests in 40.216s
    
    FAILED (failures=19, errors=352, skipped=1254)
    [runpythonerror]
    2023-02-04 07:11:16.736238395 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:16.736661501 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:16.741653360 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:16.742058336 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:16.767103759 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:16.767496845 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:18.405893211 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:18.406289467 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:18.411854590 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:18.412273156 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:18.417331724 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:18.417723880 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:18.422703779 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:18.423111235 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:19.332409760 [E:onnxruntime:, inference_session.cc:1500 operator()] Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/deep_cpu_gru.h:55 onnxruntime::DeepCpuGruOp::DeepCpuGruOp(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    2023-02-04 07:11:19.612992505 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:19.613393631 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:19.618325851 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:19.618715577 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:19.651737849 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:19.652134715 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:19.756084570 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:19.756551255 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:19.762329736 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:19.762803531 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:23.179731297 [E:onnxruntime:, inference_session.cc:1500 operator()] Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/lstm_base.h:52 onnxruntime::LSTMBase::LSTMBase(const onnxruntime::OpKernelInfo&) layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    2023-02-04 07:11:27.483641157 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:27.484024153 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:27.489527237 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:27.489928203 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:27.495483476 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:27.495886862 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:33.912954314 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:33.913372880 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:33.918304869 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:33.918701995 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:34.007976950 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:34.008382086 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:35.021960933 [E:onnxruntime:, inference_session.cc:1500 operator()] Exception during initialization: /onnxruntime_src/onnxruntime/core/providers/cpu/rnn/rnn.h:45 onnxruntime::RNN<T>::RNN(const onnxruntime::OpKernelInfo&) [with T = float] layout_ == 0 was false. Batchwise recurrent operations (layout == 1) are not supported. If you need support create a github issue with justification.
    
    2023-02-04 07:11:36.512309236 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.512715542 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:36.517576792 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.517975838 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:36.524117595 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.524614190 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:36.541670125 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.542153290 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:36.556899029 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.557301305 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:36.562787459 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 1 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:36.563196415 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.771704652 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.772262116 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.798014192 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.798566396 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.817559422 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.818021847 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.841451477 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.841911333 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.857485663 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.857945708 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.863056386 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.863512621 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.868533280 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.868997535 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.875293810 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.875803115 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.900951718 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.901459453 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.911364481 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.911872666 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.921764404 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.922273019 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.932165578 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.932673403 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.941110706 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.941566041 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.948229993 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.948731538 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.955999033 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.956503898 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.963209130 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.963697955 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.970917401 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.971408596 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.978062198 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.978544833 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.985185735 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.985658440 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:40.992352901 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:40.992820637 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.000040843 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.000520578 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.007216769 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.007677494 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.037836156 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.038298661 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.045337579 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.045784474 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.053381906 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.053842702 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.378978530 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.379503855 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.386369335 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.386904959 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.393893578 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.394399162 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.401192153 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.401705288 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.408591407 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.409089792 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.416297008 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.416784303 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.424415005 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.424906380 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.432005957 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.432484403 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.439233803 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.439708258 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.446457169 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.446945324 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.454233750 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.454704005 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.461068429 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.461512265 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.468818260 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.469281176 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.475985077 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.476437092 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.483902486 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.484362981 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.490875564 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.491315250 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.497154790 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.497555836 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.503431286 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.503900661 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:41.509267776 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:41.509732091 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.100060914 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.100608278 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.106698796 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.107289019 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.113150079 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.113606845 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.118812641 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.119269897 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.125575593 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.126070598 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.145609177 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.146274950 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.151471007 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.151909043 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.157846512 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.158321057 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.163764791 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.164219016 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.190485977 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.190961682 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.228483958 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.228923054 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.234989881 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.235433807 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.240900351 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.241331786 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.247280225 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.247698961 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.261158313 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.261567329 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.277165490 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.277582625 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.305236122 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.305652218 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.321292167 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.321712513 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.333219605 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.333639961 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.360588345 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.361079330 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.376878828 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.377272244 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.382507620 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.382935026 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.388302611 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.388710217 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.394185170 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.394588836 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.400102970 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.400498796 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.405546974 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.405939620 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.948069817 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.948562892 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.953942896 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.954385902 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.960251212 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.960809856 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    2023-02-04 07:11:42.981008359 [W:onnxruntime:, model.cc:180 Model] ONNX Runtime only *guarantees* support for models stamped with opset version 7 or above for opset domain 'ai.onnx'. Please upgrade your model to opset 7 or higher. For now, this opset 6 model may run depending upon legacy support of some older opset version operators.
    2023-02-04 07:11:42.981437015 [W:onnxruntime:, ort_transpose_optimizer.cc:24 ApplyImpl] Transpose optimizer failed: Unsupported ONNX opset
    ... (the same pair of warnings is repeated for every remaining opset 6 test model) ...
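
Most of the warnings above are emitted for opset 6 test models: onnxruntime only guarantees support for opset 7 or higher in the 'ai.onnx' domain, and its transpose optimizer skips older opsets entirely. A minimal sketch of how such a model could be upgraded before inference, assuming a hypothetical model.onnx file (the test models themselves are generated by onnx.backend.test):

<<<

import onnx
from onnx import version_converter

# Hypothetical opset 6 model; the warning suggests upgrading it.
model = onnx.load("model.onnx")

# Convert the default 'ai.onnx' domain to opset 7, the minimum
# version onnxruntime guarantees to support.
converted = version_converter.convert_version(model, 7)
onnx.checker.check_model(converted)
onnx.save(converted, "model_opset7.onnx")

>>>
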
    2023-02-04 07:11:46.372818633 [W:onnxruntime:, graph.cc:1231 Graph] Initializer pos appears in graph inputs and will not be treated as constant value/weight. This may prevent some of the graph optimizations, like const folding. Move it out of graph inputs if there is no need to override it, by either re-generating the model with latest exporter/converter or with the tool onnxruntime/tools/python/remove_initializer_from_input.py.
    ... (the same warning is repeated for the initializers pos_at, pos_erase and pos_insert) ...
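
The last warnings flag a different, common export issue: initializers (here pos, pos_at, pos_erase and pos_insert) that are also listed as graph inputs, which prevents optimizations such as constant folding. The fix the message suggests amounts to filtering those names out of the inputs, roughly what the referenced tool onnxruntime/tools/python/remove_initializer_from_input.py does; a minimal sketch, again assuming a hypothetical model.onnx:

<<<

import onnx

# Hypothetical model whose initializers also appear as graph inputs.
model = onnx.load("model.onnx")
graph = model.graph
initializer_names = {init.name for init in graph.initializer}

# Keep only the inputs that are not backed by an initializer so that
# onnxruntime can treat the initializers as constant weights again.
kept_inputs = [i for i in graph.input if i.name not in initializer_names]
del graph.input[:]
graph.input.extend(kept_inputs)

onnx.save(model, "model_clean.onnx")

>>>
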